[ 538.433657] env[61974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.433969] env[61974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.434095] env[61974]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61974) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 538.434447] env[61974]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 538.528601] env[61974]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61974) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 538.537916] env[61974]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61974) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 539.139963] env[61974]: INFO nova.virt.driver [None req-450011c3-e37a-47ce-a64b-14f5d80e13e4 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 539.209920] env[61974]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 539.210083] env[61974]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 539.210181] env[61974]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61974) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 542.355487] env[61974]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-def97ee3-431f-446f-b96a-f8b1e54ae4ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.371055] env[61974]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61974) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 542.371245] env[61974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-33613706-fb54-4629-b4ec-a03b5cf7a5b8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.405745] env[61974]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 463d6.
[ 542.405936] env[61974]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.196s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 542.406434] env[61974]: INFO nova.virt.vmwareapi.driver [None req-450011c3-e37a-47ce-a64b-14f5d80e13e4 None None] VMware vCenter version: 7.0.3
[ 542.409768] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1487c33e-fcc5-4e63-84e5-d9527e24ddea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.426841] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12b8411-10b2-4017-9c68-8f80c95fbc41 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.432432] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66decfcb-8e02-48b6-9658-4ac1be0474be {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.438897] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d844fd8d-08bb-470a-91d5-45ad2ea41973 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.451716] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbd555f-3516-4034-93c0-3312e82b06b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.457559] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccdea3f-9eca-4271-a2f2-be9b2eca9399 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.487762] env[61974]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-b25e3498-a31e-41db-86e8-6f388e22dfdf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.492398] env[61974]: DEBUG nova.virt.vmwareapi.driver [None req-450011c3-e37a-47ce-a64b-14f5d80e13e4 None None] Extension org.openstack.compute already exists. {{(pid=61974) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 542.495106] env[61974]: INFO nova.compute.provider_config [None req-450011c3-e37a-47ce-a64b-14f5d80e13e4 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 542.998313] env[61974]: DEBUG nova.context [None req-450011c3-e37a-47ce-a64b-14f5d80e13e4 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),019edef8-abd7-4b81-9224-7f90926287cd(cell1) {{(pid=61974) load_cells /opt/stack/nova/nova/context.py:464}}
[ 543.000324] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 543.000553] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 543.001231] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 543.001666] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Acquiring lock "019edef8-abd7-4b81-9224-7f90926287cd" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 543.001860] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Lock "019edef8-abd7-4b81-9224-7f90926287cd" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 543.002890] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Lock "019edef8-abd7-4b81-9224-7f90926287cd" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 543.022725] env[61974]: INFO dbcounter [None req-792c599a-77cb-4096-a811-402d3022492f None None] Registered counter for database nova_cell0
[ 543.030801] env[61974]: INFO dbcounter [None req-792c599a-77cb-4096-a811-402d3022492f None None] Registered counter for database nova_cell1
[ 543.034285] env[61974]: DEBUG oslo_db.sqlalchemy.engines [None req-792c599a-77cb-4096-a811-402d3022492f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61974) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 543.034634] env[61974]: DEBUG oslo_db.sqlalchemy.engines [None req-792c599a-77cb-4096-a811-402d3022492f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61974) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 543.039472] env[61974]: ERROR nova.db.main.api [None req-792c599a-77cb-4096-a811-402d3022492f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 543.039472] env[61974]: result = function(*args, **kwargs)
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 543.039472] env[61974]: return func(*args, **kwargs)
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 543.039472] env[61974]: result = fn(*args, **kwargs)
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 543.039472] env[61974]: return f(*args, **kwargs)
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 543.039472] env[61974]: return db.service_get_minimum_version(context, binaries)
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 543.039472] env[61974]: _check_db_access()
[ 543.039472] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 543.039472] env[61974]: stacktrace = ''.join(traceback.format_stack())
[ 543.039472] env[61974]:
[ 543.040484] env[61974]: ERROR nova.db.main.api [None req-792c599a-77cb-4096-a811-402d3022492f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 543.040484] env[61974]: result = function(*args, **kwargs)
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 543.040484] env[61974]: return func(*args, **kwargs)
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 543.040484] env[61974]: result = fn(*args, **kwargs)
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 543.040484] env[61974]: return f(*args, **kwargs)
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 543.040484] env[61974]: return db.service_get_minimum_version(context, binaries)
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 543.040484] env[61974]: _check_db_access()
[ 543.040484] env[61974]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 543.040484] env[61974]: stacktrace = ''.join(traceback.format_stack())
[ 543.040484] env[61974]:
[ 543.040885] env[61974]: WARNING nova.objects.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] Failed to get minimum service version for cell 019edef8-abd7-4b81-9224-7f90926287cd
[ 543.041020] env[61974]: WARNING nova.objects.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 543.041447] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Acquiring lock "singleton_lock" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 543.041612] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Acquired lock "singleton_lock" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[
543.041857] env[61974]: DEBUG oslo_concurrency.lockutils [None req-792c599a-77cb-4096-a811-402d3022492f None None] Releasing lock "singleton_lock" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.042191] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] Full set of CONF: {{(pid=61974) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 543.042340] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ******************************************************************************** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 543.042509] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] Configuration options gathered from: {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 543.042601] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 543.042790] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 543.042956] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ================================================================================ {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 543.043139] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] allow_resize_to_same_host = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.043317] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] arq_binding_timeout = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.043448] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] backdoor_port = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.043579] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] backdoor_socket = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.043744] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] block_device_allocate_retries = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.043909] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] block_device_allocate_retries_interval = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044091] env[61974]: DEBUG 
oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cert = self.pem {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044263] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044433] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute_monitors = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044602] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] config_dir = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044772] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] config_drive_format = iso9660 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.044906] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045119] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] config_source = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045289] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] console_host = devstack {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045461] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] control_exchange = nova {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045624] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cpu_allocation_ratio = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045785] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] daemon = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.045956] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] debug = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.046130] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_access_ip_network_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.046298] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_availability_zone = nova {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.046456] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_ephemeral_format = 
None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.046617] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_green_pool_size = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.046858] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047036] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] default_schedule_zone = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047204] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] disk_allocation_ratio = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047392] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] enable_new_services = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047579] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] enabled_apis = ['osapi_compute'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047748] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] enabled_ssl_apis = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.047912] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] flat_injected = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.048095] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] force_config_drive = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.048267] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] force_raw_images = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.048442] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] graceful_shutdown_timeout = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.048609] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] heal_instance_info_cache_interval = 60 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.048825] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] host = cpu-1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049014] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049195] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] initial_disk_allocation_ratio = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049386] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] initial_ram_allocation_ratio = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049621] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049793] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_build_timeout = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.049958] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_delete_interval = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050147] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_format = [instance: %(uuid)s] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050319] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_name_template = instance-%08x {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050484] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_usage_audit = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050656] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_usage_audit_period = month {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050820] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.050987] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] instances_path = /opt/stack/data/nova/instances {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.051172] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] internal_service_availability_zone = internal {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.051334] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] key = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.051497] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] live_migration_retry_count = 30 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.051667] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_color = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.051834] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_config_append = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052010] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052182] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_dir = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052340] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052470] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_options = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052634] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_rotate_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052800] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_rotate_interval_type = days {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.052966] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] log_rotation_type = none {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053444] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053444] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053444] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053628] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053695] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.053840] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] long_rpc_timeout = 1800 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054010] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_concurrent_builds = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054183] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_concurrent_live_migrations = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054343] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_concurrent_snapshots = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054502] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_local_block_devices = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054664] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_logfile_count = 30 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.054824] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] max_logfile_size_mb = 200 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055015] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] maximum_instance_delete_attempts = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055206] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metadata_listen = 0.0.0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055377] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metadata_listen_port = 8775 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055548] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metadata_workers = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055713] env[61974]: DEBUG oslo_service.service 
[None req-792c599a-77cb-4096-a811-402d3022492f None None] migrate_max_retries = -1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.055879] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] mkisofs_cmd = genisoimage {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056096] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] my_block_storage_ip = 10.180.1.21 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056239] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] my_ip = 10.180.1.21 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056407] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] network_allocate_retries = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056587] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056755] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] osapi_compute_listen = 0.0.0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.056918] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] osapi_compute_listen_port = 8774 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057099] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] osapi_compute_unique_server_name_scope = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057293] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] osapi_compute_workers = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057482] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] password_length = 12 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057648] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] periodic_enable = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057810] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] periodic_fuzzy_delay = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.057982] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] pointer_model = usbtablet {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058173] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] preallocate_images = none {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058337] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] publish_errors = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058470] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] pybasedir = /opt/stack/nova {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058631] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ram_allocation_ratio = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058794] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rate_limit_burst = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.058964] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rate_limit_except_level = CRITICAL {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059143] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rate_limit_interval = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059307] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reboot_timeout = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059467] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reclaim_instance_interval = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059626] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] record = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059797] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reimage_timeout_per_gb = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.059967] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] report_interval = 120 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060147] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rescue_timeout = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060311] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reserved_host_cpus = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060473] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reserved_host_disk_mb = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060633] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f 
None None] reserved_host_memory_mb = 512 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060795] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] reserved_huge_pages = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.060959] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] resize_confirm_window = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061139] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] resize_fs_using_block_device = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061298] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] resume_guests_state_on_host_boot = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061466] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061627] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] rpc_response_timeout = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061787] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] run_external_periodic_tasks = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.061954] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] running_deleted_instance_action = reap {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062131] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] running_deleted_instance_poll_interval = 1800 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062293] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] running_deleted_instance_timeout = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062452] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler_instance_sync_interval = 120 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062630] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_down_time = 720 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062791] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] servicegroup_driver = db {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.062945] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] shell_completion = None {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.063124] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] shelved_offload_time = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.063287] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] shelved_poll_interval = 3600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.063457] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] shutdown_timeout = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.063617] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] source_is_ipv6 = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.063777] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ssl_only = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064065] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064200] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] sync_power_state_interval = 600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064364] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] sync_power_state_pool_size = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064532] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] syslog_log_facility = LOG_USER {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064690] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] tempdir = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.064850] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] timeout_nbd = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065058] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] transport_url = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065241] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] update_resources_interval = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065405] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_cow_images = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065567] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] use_eventlog = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065726] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_journal = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.065883] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_json = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066068] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_rootwrap_daemon = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066250] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_stderr = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066413] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] use_syslog = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066570] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vcpu_pin_set = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066743] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plugging_is_fatal = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.066913] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plugging_timeout = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.067095] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] virt_mkfs = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.067278] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] volume_usage_poll_interval = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.067466] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] watch_log_file = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.067641] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] web = /usr/share/spice-html5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 543.067827] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.067998] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.068200] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.068408] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_concurrency.disable_process_locking = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.068959] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.069176] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.069353] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.069532] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.069708] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.069880] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070080] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.auth_strategy = keystone {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070261] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.compute_link_prefix = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070440] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070616] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.dhcp_domain = novalocal {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070787] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.enable_instance_password = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.070956] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.glance_link_prefix = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071144] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071321] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071488] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.instance_list_per_project_cells = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071652] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.list_records_by_skipping_down_cells = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071816] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.local_metadata_per_cell = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.071987] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.max_limit = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.072179] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.metadata_cache_expiration = 15 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.072355] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.neutron_default_tenant_id = default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.072528] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.response_validation = warn {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.072700] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.use_neutron_default_nets = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.072875] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073050] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073230] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073401] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073576] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_dynamic_targets = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073744] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_jsonfile_path = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.073930] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.074174] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.backend = dogpile.cache.memcached {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.074312] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.backend_argument = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.074485] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.config_prefix = cache.oslo {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.074658] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.dead_timeout = 60.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.074826] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.debug_cache_backend = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075030] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.enable_retry_client = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075225] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.enable_socket_keepalive = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075402] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.enabled = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075570] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.enforce_fips_mode = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075736] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.expiration_time = 600 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.075902] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.hashclient_retry_attempts = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076106] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.hashclient_retry_delay = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076292] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_dead_retry = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076455] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_password = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076623] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076789] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.076953] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_pool_maxsize = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.077135] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.077327] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_sasl_enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.077526] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.077698] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_socket_timeout = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.077860] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.memcache_username = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078041] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.proxies = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078216] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_db = 0 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078378] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_password = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078549] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_sentinel_service_name = mymaster {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078726] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.078895] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_server = localhost:6379 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079075] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_socket_timeout = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079242] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.redis_username = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079407] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.retry_attempts = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079575] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.retry_delay = 0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079739] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.socket_keepalive_count = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.079901] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.socket_keepalive_idle = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.080079] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.socket_keepalive_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.080260] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.tls_allowed_ciphers = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.080454] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.tls_cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.080620] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.tls_certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
543.080786] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.tls_enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.080947] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cache.tls_keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081140] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081322] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.auth_type = password {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081488] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081664] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.catalog_info = volumev3::publicURL {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081827] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.081992] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.082173] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.cross_az_attach = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.082339] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.debug = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.082504] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.endpoint_template = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.082672] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.http_retries = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.082836] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083007] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083190] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.os_region_name = RegionOne 
{{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083359] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083519] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cinder.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083693] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.083855] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.cpu_dedicated_set = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084021] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.cpu_shared_set = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084193] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.image_type_exclude_list = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084362] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084526] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.max_concurrent_disk_ops = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084688] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.max_disk_devices_to_attach = -1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.084849] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085055] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085240] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.resource_provider_association_refresh = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085407] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085571] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] compute.shutdown_retry_interval = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085751] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.085936] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] conductor.workers = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.086152] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] console.allowed_origins = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.086330] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] console.ssl_ciphers = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.086507] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] console.ssl_minimum_version = default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.086679] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] consoleauth.enforce_session_timeout = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.086851] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] consoleauth.token_ttl = 600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087038] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087206] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087410] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087585] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087750] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.087912] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088089] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] 
cyborg.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088258] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088419] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088578] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088737] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.region_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.088895] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089067] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089245] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.service_type = accelerator {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089406] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089565] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089724] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.089908] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090079] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090250] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] cyborg.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090430] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.backend = sqlalchemy {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090602] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.connection = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090772] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.connection_debug = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.090943] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.connection_parameters = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091124] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.connection_recycle_time = 3600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091291] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.connection_trace = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091451] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.db_inc_retry_interval = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091615] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.db_max_retries = 20 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091777] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.db_max_retry_interval = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.091941] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.db_retry_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.092130] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.max_overflow = 50 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.092297] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.max_pool_size = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.092501] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.max_retries = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.092679] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.092841] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.mysql_wsrep_sync_wait = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
[ 543.093092] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.pool_timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.093180] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.retry_interval = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.093336] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.slave_connection = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.093500] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.sqlite_synchronous = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.093662] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] database.use_db_reconnect = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.093843] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.backend = sqlalchemy {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094065] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.connection = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094198] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.connection_debug = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094370] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.connection_parameters = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094538] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.connection_recycle_time = 3600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094703] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.connection_trace = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.094866] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.db_inc_retry_interval = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095204] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.db_max_retries = 20 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095261] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.db_max_retry_interval = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095407] env[61974]: DEBUG 
oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.db_retry_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095632] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.max_overflow = 50 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095734] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.max_pool_size = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.095898] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.max_retries = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096101] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096281] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096448] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.pool_timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096614] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.retry_interval = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096777] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.slave_connection = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.096943] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] api_database.sqlite_synchronous = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.097138] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] devices.enabled_mdev_types = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.097348] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.097537] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ephemeral_storage_encryption.default_format = luks {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.097707] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ephemeral_storage_encryption.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.097877] env[61974]: 
DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098069] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.api_servers = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098244] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098411] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098578] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098742] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.098904] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099079] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.debug = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099254] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.default_trusted_certificate_ids = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099420] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.enable_certificate_validation = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099585] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.enable_rbd_download = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099746] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.099912] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100090] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100261] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] 
glance.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100419] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100585] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.num_retries = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100757] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.rbd_ceph_conf = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.100921] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.rbd_connect_timeout = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101103] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.rbd_pool = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101278] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.rbd_user = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101441] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.region_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101602] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101762] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.101931] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.service_type = image {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102111] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102281] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102442] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102603] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102786] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.102950] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.verify_glance_signatures = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.103188] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] glance.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.103298] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] guestfs.debug = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.103466] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] mks.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.103824] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104029] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.manager_interval = 2400 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104210] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.precache_concurrency = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104408] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.remove_unused_base_images = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104608] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104785] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.104982] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] image_cache.subdirectory_name = _base {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.105200] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.api_max_retries = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.105372] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.api_retry_interval = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
543.105536] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.105702] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.auth_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.105864] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106037] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106210] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106376] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.conductor_group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106535] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106695] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.106853] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107067] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107208] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107387] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107564] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107733] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.peer_list = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.107895] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.region_name = None {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108073] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108244] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.serial_console_state_timeout = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108403] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108571] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.service_type = baremetal {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108732] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.shard = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.108897] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109073] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109239] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109398] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109581] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109744] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ironic.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.109928] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110118] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] key_manager.fixed_key = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110308] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110470] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.barbican_api_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110631] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.barbican_endpoint = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110803] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.barbican_endpoint_type = public {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.110962] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.barbican_region_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111139] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111303] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111467] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111629] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111788] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.111953] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.number_of_retries = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112131] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.retry_delay = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112298] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.send_service_user_token = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112461] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112619] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112781] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.verify_ssl = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.112939] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican.verify_ssl_path = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113120] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113300] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.auth_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113444] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113601] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113764] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.113925] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114095] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114292] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114423] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] barbican_service_user.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114590] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.approle_role_id = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114748] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.approle_secret_id = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.114922] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.kv_mountpoint = secret {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115117] env[61974]: DEBUG 
oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.kv_path = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115292] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.kv_version = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115456] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.namespace = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115617] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.root_token_id = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115776] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.ssl_ca_crt_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.115945] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.timeout = 60.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.116166] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.use_ssl = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.116357] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.116535] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.116702] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.auth_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.116864] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117036] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117212] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117412] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117586] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.connect_retry_delay = None {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117748] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.117911] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118084] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118250] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118410] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118569] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.region_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118729] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.118886] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119069] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.service_type = identity {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119239] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119424] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119605] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119766] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.119949] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
543.120130] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] keystone.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.120337] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.connection_uri = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.120502] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_mode = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.120670] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_model_extra_flags = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.120841] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_models = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121021] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_power_governor_high = performance {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121194] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_power_governor_low = powersave {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121362] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_power_management = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121532] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121701] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.device_detach_attempts = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.121864] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.device_detach_timeout = 20 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122044] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.disk_cachemodes = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122212] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.disk_prefix = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122379] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.enabled_perf_events = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122544] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.file_backed_memory = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122712] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.gid_maps = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.122872] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.hw_disk_discard = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123044] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.hw_machine_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123224] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_rbd_ceph_conf = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123397] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123561] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123729] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_rbd_glance_store_name = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.123896] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_rbd_pool = rbd {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124088] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_type = default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124255] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.images_volume_group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124419] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.inject_key = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124581] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.inject_partition = -2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124742] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.inject_password = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.124913] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] 
libvirt.iscsi_iface = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125111] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.iser_use_multipath = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125289] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_bandwidth = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125457] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125621] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_downtime = 500 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125785] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.125948] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.126146] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_inbound_addr = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.126320] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.126490] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_permit_post_copy = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.126653] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_scheme = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.126836] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_timeout_action = abort {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127011] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_tunnelled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127184] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_uri = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127376] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.live_migration_with_native_tls = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127553] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.max_queues = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127719] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.127968] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.128190] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.nfs_mount_options = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.128506] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.128685] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.128856] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_iser_scan_tries = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129029] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_memory_encrypted_guests = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129204] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129369] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_pcie_ports = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129537] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.num_volume_scan_tries = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129705] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.pmem_namespaces = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.129866] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.quobyte_client_cfg = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.130185] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.130369] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rbd_connect_timeout = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.130539] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.130706] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.130869] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rbd_secret_uuid = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131040] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rbd_user = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131215] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131411] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.remote_filesystem_transport = ssh {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131601] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rescue_image_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131767] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rescue_kernel_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.131930] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rescue_ramdisk_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.132122] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.132284] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.rx_queue_size = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.132454] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.smbfs_mount_options = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.132736] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.132912] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.snapshot_compression = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133088] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.snapshot_image_format = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133316] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133494] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.sparse_logical_volumes = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133650] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.swtpm_enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133821] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.swtpm_group = tss {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.133990] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.swtpm_user = tss {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.134180] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.sysinfo_serial = unique {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.134344] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.tb_cache_size = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.134512] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.tx_queue_size = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.134672] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.uid_maps = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.134836] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.use_virtio_for_bridges = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135046] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.virt_type = kvm {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135240] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.volume_clear = zero {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135408] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.volume_clear_size = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135579] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.volume_use_multipath = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135741] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_cache_path = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.135911] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.136109] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_mount_group = qemu {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.136293] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_mount_opts = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.136465] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.136744] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.136923] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.vzstorage_mount_user = stack {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.137105] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.137312] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.137508] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.auth_type = password {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.137673] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.137837] env[61974]: DEBUG oslo_service.service 
[None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138023] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138187] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138352] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138523] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.default_floating_pool = public {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138685] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.138852] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.extension_sync_interval = 600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.139026] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.http_retries = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.139205] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.139450] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.139730] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.140044] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.140342] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.140627] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.ovs_bridge = br-int {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.140890] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.physnets = [] {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.141173] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.region_name = RegionOne {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.141438] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.141709] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.service_metadata_proxy = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.141994] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.142317] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.service_type = network {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.142611] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.142901] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.143211] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.143507] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.143816] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.144119] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] neutron.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.144400] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] notifications.bdms_in_notifications = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.144693] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] notifications.default_level = INFO {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.145016] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] notifications.notification_format = unversioned {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.145320] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] notifications.notify_on_state_change = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.145638] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.145953] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] pci.alias = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.146282] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] pci.device_spec = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.146582] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] pci.report_in_placement = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.146888] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.147227] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.auth_type = password {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.147550] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.147851] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.148169] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.148475] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.148783] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.149103] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.149407] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.default_domain_id = None {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.149711] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.default_domain_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.150030] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.domain_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.150349] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.domain_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.150659] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.150967] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.151287] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.151579] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.151823] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.152074] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.password = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.152307] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.project_domain_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.152542] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.project_domain_name = Default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.152775] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.project_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.153052] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.project_name = service {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.153318] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.region_name = RegionOne {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.153574] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.153822] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.154084] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.service_type = placement {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.154321] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.154563] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.154833] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.155106] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.system_scope = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.155428] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.155633] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.trust_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.155900] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.user_domain_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.156205] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.user_domain_name = Default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.156484] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.user_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.156776] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.username = nova {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.157090] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.157371] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] placement.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.157665] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.cores = 20 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.157942] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.count_usage_from_placement = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.158250] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.158556] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.injected_file_content_bytes = 10240 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.158824] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.injected_file_path_length = 255 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.159104] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.injected_files = 5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.159378] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.instances = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.159638] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.key_pairs = 100 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.159899] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.metadata_items = 128 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.160170] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.ram = 51200 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.160422] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.recheck_quota = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.160674] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.server_group_members = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.160923] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] quota.server_groups = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.161194] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.161443] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.161686] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.image_metadata_prefilter = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.161930] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.162207] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.max_attempts = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.162453] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.max_placement_results = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.162698] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.162941] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.query_placement_for_image_type_support = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.163210] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.163473] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] scheduler.workers = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.163730] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.163987] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.164276] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.164532] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.164778] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.165040] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.165297] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.165587] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.165839] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.host_subset_size = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.166100] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.166348] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.166589] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.166834] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.isolated_hosts = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.167093] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.isolated_images = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.167345] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.167585] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.167838] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.168095] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.pci_in_placement = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.168347] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.168589] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.168830] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.169083] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.169332] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.169577] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.169813] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.track_instance_changes = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.170086] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.170344] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metrics.required = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.170588] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metrics.weight_multiplier = 1.0 
{{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.170831] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.171096] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] metrics.weight_setting = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.171557] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.171815] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.172091] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.port_range = 10000:20000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.172352] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.172604] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.172851] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] serial_console.serialproxy_port = 6083 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.173115] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.173380] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.auth_type = password {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.173619] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.173853] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.174117] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.174367] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.insecure = False {{(pid=61974) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.174604] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.174857] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.send_service_user_token = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.175125] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.175387] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] service_user.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.175864] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.agent_enabled = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.175864] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.176340] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.176613] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.176799] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.html5proxy_port = 6082 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.176965] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.image_compression = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177146] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.jpeg_compression = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177315] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.playback_compression = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177482] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.require_secure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177654] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.server_listen = 127.0.0.1 {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177827] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.177997] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.streaming_mode = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.178171] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] spice.zlib_compression = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.178342] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] upgrade_levels.baseapi = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.178514] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] upgrade_levels.compute = auto {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.178677] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] upgrade_levels.conductor = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.178838] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] upgrade_levels.scheduler = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179023] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.auth_section = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179196] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.auth_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179357] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179526] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179692] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.179856] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180027] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.keyfile = None {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180211] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180359] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vendordata_dynamic_auth.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180534] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.api_retry_count = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180700] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.ca_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.180873] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.cache_prefix = devstack-image-cache {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181052] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.cluster_name = testcl1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181227] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.connection_pool_size = 10 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181390] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.console_delay_seconds = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181560] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.datastore_regex = ^datastore.* {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181782] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.181958] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.host_password = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182144] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.host_port = 443 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182319] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.host_username = administrator@vsphere.local {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182489] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.insecure = True {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182654] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.integration_bridge = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182822] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.maximum_objects = 100 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.182981] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.pbm_default_policy = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183159] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.pbm_enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183318] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.pbm_wsdl_location = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183486] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183646] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.serial_port_proxy_uri = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183804] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.serial_port_service_uri = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.183969] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.task_poll_interval = 0.5 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.184161] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.use_linked_clone = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.184330] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.vnc_keymap = en-us {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.184496] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.vnc_port = 5900 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.184661] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vmware.vnc_port_total = 10000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.184850] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.auth_schemes = ['none'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.185046] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.185363] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.185550] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.185724] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.novncproxy_port = 6080 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.185903] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.server_listen = 127.0.0.1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186088] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186259] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.vencrypt_ca_certs = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186420] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.vencrypt_client_cert = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186578] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vnc.vencrypt_client_key = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186753] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.186917] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_deep_image_inspection = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187092] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187260] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187423] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187585] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.disable_rootwrap = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187745] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.enable_numa_live_migration = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.187905] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188078] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188246] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188408] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.libvirt_disable_apic = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188567] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188731] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.188893] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189065] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189231] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189393] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189551] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189709] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.189869] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190044] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190237] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190409] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.client_socket_timeout = 900 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190579] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.default_pool_size = 1000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190746] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.keep_alive = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.190914] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.max_header_line = 16384 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191091] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.secure_proxy_ssl_header = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191258] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.ssl_ca_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191420] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.ssl_cert_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191582] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.ssl_key_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191750] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.tcp_keepidle = 600 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.191930] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.192115] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] zvm.ca_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.192281] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] zvm.cloud_connector_url = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.192591] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.192767] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] zvm.reachable_timeout = 300 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.192953] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.enforce_new_defaults = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.193374] env[61974]: WARNING oslo_config.cfg [None req-792c599a-77cb-4096-a811-402d3022492f None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
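Note: every "log_opt_values" DEBUG record in this dump is produced by oslo.config's ConfigOpts.log_opt_values() call at service startup (hence the repeated {{... log_opt_values .../oslo_config/cfg.py:2826}} suffix). A minimal, self-contained Python sketch of that mechanism follows; the option names registered here are illustrative only and are not taken from this deployment's configuration.

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.CONF
# Illustrative options only; a real Nova process registers hundreds of them
# across many groups before the dump happens.
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_scope', default=True),
        cfg.BoolOpt('enforce_new_defaults', default=True),
    ],
    group='oslo_policy',
)

CONF([])  # a real service would pass e.g. ['--config-file', '/etc/nova/nova.conf']
# Emits one DEBUG record per registered option, in the same
# "<group>.<option> = <value>" format as the records in this log.
CONF.log_opt_values(LOG, logging.DEBUG)

Options registered with secret=True are rendered as **** by this call, which is why values such as vmware.host_password and the notification transport_url appear masked above and below.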
[ 543.193559] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.enforce_scope = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.193739] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.policy_default_rule = default {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.193923] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194113] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.policy_file = policy.yaml {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194290] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194456] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194618] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194779] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.194983] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.195145] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.195324] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.195504] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.connection_string = messaging:// {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.195674] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.enabled = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.195845] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.es_doc_type = notification 
{{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196018] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.es_scroll_size = 10000 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196193] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.es_scroll_time = 2m {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196361] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.filter_error_trace = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196529] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.hmac_keys = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196701] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.sentinel_service_name = mymaster {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.196871] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.socket_timeout = 0.1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197052] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.trace_requests = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197223] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler.trace_sqlalchemy = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197409] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler_jaeger.process_tags = {} {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197572] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler_jaeger.service_name_prefix = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197734] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] profiler_otlp.service_name_prefix = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.197901] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] remote_debug.host = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198078] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] remote_debug.port = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198265] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198428] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198594] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198758] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.198920] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199098] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199266] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199432] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199596] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199770] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.199933] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200121] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200296] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200468] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200640] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200809] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.200976] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.201168] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.201336] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.201502] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.201674] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.201843] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202016] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202197] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202365] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202527] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202691] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.202853] env[61974]: DEBUG oslo_service.service [None 
req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203033] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203209] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203384] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203556] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203723] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.203897] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.204092] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.ssl_version = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.204268] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.204458] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.204627] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_notifications.retry = -1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.204814] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205008] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_messaging_notifications.transport_url = **** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205200] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.auth_section = None {{(pid=61974) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205389] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.auth_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205528] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.cafile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205687] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.certfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.205850] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.collect_timing = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206022] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.connect_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206183] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.connect_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206344] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.endpoint_id = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206505] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.endpoint_override = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206667] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.insecure = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206826] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.keyfile = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.206987] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.max_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207161] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.min_version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207323] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.region_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207485] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.retriable_status_codes = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207645] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.service_name = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207803] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.service_type = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.207966] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.split_loggers = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208144] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.status_code_retries = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208306] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.status_code_retry_delay = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208465] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.timeout = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208624] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.valid_interfaces = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208781] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_limit.version = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.208948] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_reports.file_event_handler = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209130] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209293] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] oslo_reports.log_dir = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209465] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209625] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209783] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.209951] 
env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210132] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210293] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210465] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210627] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210787] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.210953] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211132] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211292] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] vif_plug_ovs_privileged.user = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211467] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.flat_interface = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211650] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211824] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.211999] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.212194] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.212368] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.212538] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.212702] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.212883] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213070] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.isolate_vif = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213247] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213418] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213590] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213760] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.ovsdb_interface = native {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.213925] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] os_vif_ovs.per_port_bridge = False {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214111] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] privsep_osbrick.capabilities = [21] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214278] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] privsep_osbrick.group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214527] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] privsep_osbrick.helper_command = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214604] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214761] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.214920] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] privsep_osbrick.user = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215111] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215277] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.group = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215440] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.helper_command = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215601] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215763] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.215919] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] nova_sys_admin.user = None {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 543.216060] env[61974]: DEBUG oslo_service.service [None req-792c599a-77cb-4096-a811-402d3022492f None None] ******************************************************************************** {{(pid=61974) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 543.216574] env[61974]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 543.719652] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Getting list of instances from cluster (obj){ [ 543.719652] env[61974]: value = "domain-c8" [ 543.719652] env[61974]: _type = "ClusterComputeResource" [ 543.719652] env[61974]: } {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 543.720901] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03303c30-3f88-4bcf-a4d8-5d9214c1f770 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.729968] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Got total of 0 instances {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 543.730622] env[61974]: WARNING nova.virt.vmwareapi.driver [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 543.730765] env[61974]: INFO nova.virt.node [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Generated node identity 81f72dd1-35ef-4b87-b120-a6ea5ab8608a [ 543.730994] env[61974]: INFO nova.virt.node [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Wrote node identity 81f72dd1-35ef-4b87-b120-a6ea5ab8608a to /opt/stack/data/n-cpu-1/compute_id [ 544.233664] env[61974]: WARNING nova.compute.manager [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Compute nodes ['81f72dd1-35ef-4b87-b120-a6ea5ab8608a'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 545.239454] env[61974]: INFO nova.compute.manager [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 546.247068] env[61974]: WARNING nova.compute.manager [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 546.247068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.247068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.247068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.247068] env[61974]: DEBUG nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 546.247068] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592d99f4-b4f0-48d1-aba7-1d053eee53d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.255175] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f892dd-6892-4057-aa92-700593a8f79a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.268798] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eb654130-d480-430a-abe8-a13c864e5593 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.278549] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b44f153-440d-4fe0-9211-3a981968c653 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.307730] env[61974]: DEBUG nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181209MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 546.308030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.308372] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.812762] env[61974]: WARNING nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] No compute node record for cpu-1:81f72dd1-35ef-4b87-b120-a6ea5ab8608a: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 81f72dd1-35ef-4b87-b120-a6ea5ab8608a could not be found. [ 547.316327] env[61974]: INFO nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a [ 548.825878] env[61974]: DEBUG nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 548.825878] env[61974]: DEBUG nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 548.976419] env[61974]: INFO nova.scheduler.client.report [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] [req-7ac29ce9-35e1-4dfc-b029-8110cf9db2c6] Created resource provider record via placement API for resource provider with UUID 81f72dd1-35ef-4b87-b120-a6ea5ab8608a and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 548.995132] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4cfdd1-516d-404c-9915-0bda038089b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.001758] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef2aea7-6ad3-438b-956c-0a6d3d7ad39b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.031134] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850e8bf2-a165-4391-88fb-ff929894eeb2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.039027] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9bd0aa-8d53-459c-9829-e540c9e95a28 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.051529] env[61974]: DEBUG nova.compute.provider_tree [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 549.587876] env[61974]: DEBUG nova.scheduler.client.report [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 549.587876] env[61974]: DEBUG nova.compute.provider_tree [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 0 to 1 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 549.587876] env[61974]: DEBUG nova.compute.provider_tree [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 549.641076] env[61974]: DEBUG nova.compute.provider_tree [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Updating 
resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 1 to 2 during operation: update_traits {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 550.145841] env[61974]: DEBUG nova.compute.resource_tracker [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 550.145841] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.835s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.145841] env[61974]: DEBUG nova.service [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Creating RPC server for service compute {{(pid=61974) start /opt/stack/nova/nova/service.py:186}} [ 550.160229] env[61974]: DEBUG nova.service [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] Join ServiceGroup membership for this service compute {{(pid=61974) start /opt/stack/nova/nova/service.py:203}} [ 550.160229] env[61974]: DEBUG nova.servicegroup.drivers.db [None req-0580fb3f-bed7-46e1-977c-6dd01633d38a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61974) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 585.915350] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "811de811-d683-44b4-9a25-33923f235e3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.915895] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "811de811-d683-44b4-9a25-33923f235e3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.430298] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 586.820788] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquiring lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.821209] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.984751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.984751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.990304] env[61974]: INFO nova.compute.claims [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.327620] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 587.868298] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.069490] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be937ee-003b-439f-8fd2-4a46ae695184 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.078387] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f71c32-e11b-4084-8227-15188333d4c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.121479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c2cddd-4ded-4359-ad84-a3c348a1b25b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.130475] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b378d6-fa3a-4e7d-92d9-645e0aaee9c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.157355] env[61974]: DEBUG nova.compute.provider_tree [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.666177] env[61974]: DEBUG nova.scheduler.client.report [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.171629] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.172947] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 589.175423] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.307s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.176840] env[61974]: INFO nova.compute.claims [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.515448] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquiring lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.516286] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.686751] env[61974]: DEBUG nova.compute.utils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.689350] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 589.691603] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.022290] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 590.190413] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 590.218032] env[61974]: DEBUG nova.policy [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '468bf17d7d5643fa9b4587b7ce5df7d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe4c34c59393476bb016bd09ed45164a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 590.319464] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8f8c9a-1dd4-4a61-af17-575532afd4fc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.328014] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ddce7e-aa17-42e6-93e4-bf920603ae2a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.362956] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f904222-7a6a-4ac0-8556-9d4f8da1a9e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.369164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.369407] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.376439] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458ae119-9797-4728-8c19-b45df2e3a8f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.396852] env[61974]: DEBUG nova.compute.provider_tree [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.553997] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.872175] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 590.901282] env[61974]: DEBUG nova.scheduler.client.report [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 591.091624] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Successfully created port: 1c9380de-b680-410d-a930-68db86f0cdbd {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.208190] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 591.246389] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 591.246648] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.246800] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 591.250037] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.250321] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 591.250486] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 591.250731] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 591.250894] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 591.251302] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 591.251475] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 591.251658] env[61974]: DEBUG nova.virt.hardware [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 591.252559] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ac8a89-ae97-428b-a538-5afa038e1eef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.261571] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9c25f5-7feb-45e2-9a38-2906691ace81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.279919] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dd0640-a133-4e2f-a051-39e8656aba23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.405435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.405435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.406037] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 591.409932] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.856s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.411410] env[61974]: INFO nova.compute.claims [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.919742] env[61974]: DEBUG nova.compute.utils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.924952] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 591.925177] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 592.200825] env[61974]: DEBUG nova.policy [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3d7aa826e9344fea6c4efce6cce1e56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02040149c2f044ed8cdc4f70540342e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 592.428963] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 592.567419] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2622b5-f139-4d78-a480-3ec23a8e9376 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.578014] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d199891b-8592-4390-ac48-edf99e318de1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.618221] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c33ad0-09f7-4281-97e4-df80550bae3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.631198] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594eeb74-8793-482d-a45a-af6784dce7d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.654577] env[61974]: DEBUG nova.compute.provider_tree [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.157998] env[61974]: DEBUG nova.scheduler.client.report [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 593.201717] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "b534ef37-c3d0-478e-86a9-4794251a00a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.201717] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "b534ef37-c3d0-478e-86a9-4794251a00a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.444370] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Start spawning the instance on 
the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 593.474226] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 593.474452] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.474819] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 593.474819] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.474889] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 593.475034] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 593.475327] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 593.475426] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
593.475668] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 593.475953] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 593.476327] env[61974]: DEBUG nova.virt.hardware [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 593.477492] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdb7a79-ddc1-4bbf-81ad-3cc692ba0c4f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.488801] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a839abc-e0a0-43aa-b901-1f5866187bff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.666405] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.669069] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 593.676429] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.266s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.676429] env[61974]: INFO nova.compute.claims [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.705264] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 594.100807] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Successfully created port: 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.183908] env[61974]: DEBUG nova.compute.utils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 594.185820] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 594.185998] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 594.232014] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.399994] env[61974]: DEBUG nova.policy [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b8208381f7348f7a394fe032700ea55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d50c4e6824e42ca8c5df2443aeb74dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 594.694954] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 594.838196] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35809abe-602b-4a44-9b9a-8ef33f4ae57c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.847849] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc0389f-07be-4967-89eb-142bd4c778e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.887458] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ec0a0c-bf77-42db-a8d7-969c665669b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.895531] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585c7108-53b1-4ad4-99aa-b87229873028 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.909973] env[61974]: DEBUG nova.compute.provider_tree [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.286526] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquiring lock "88c3e670-b97a-4797-8821-cc24d2d07115" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.289417] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "88c3e670-b97a-4797-8821-cc24d2d07115" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.415036] env[61974]: DEBUG nova.scheduler.client.report [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.710925] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Start spawning the instance 
on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 595.740229] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.740448] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.740604] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.740916] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.740916] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.742021] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.742374] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.742539] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.743263] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.743263] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.743263] env[61974]: DEBUG nova.virt.hardware [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.744382] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36875814-0cbe-4026-94c9-56bd67441ae4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.756166] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c3ee79-3d1b-4efa-8525-7d42a837b41b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.794465] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 595.925790] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.925790] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 595.928459] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.697s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.932659] env[61974]: INFO nova.compute.claims [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.961331] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Successfully created port: 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.966165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquiring lock "722b2d1a-9b8a-4120-85ce-c15cddd46479" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.966165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Lock "722b2d1a-9b8a-4120-85ce-c15cddd46479" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.319697] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.440145] env[61974]: DEBUG nova.compute.utils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.443637] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 596.443637] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.468478] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 596.715288] env[61974]: DEBUG nova.policy [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7983fdd8bf664776a5cded0474f1f780', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e31c7439644f9591469e6795390465', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.736826] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "9b3b4b80-9de4-4e59-b211-38bc8d9caeae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.737372] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "9b3b4b80-9de4-4e59-b211-38bc8d9caeae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.948971] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 597.001176] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.104820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d31ab24-49f3-4356-a861-c26a010ac90a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.113227] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ca2e2e-44f8-4a55-b248-55599c0f8cec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.145656] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a3bb99-f14d-447a-8f45-218b926d5380 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.153326] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4385925d-756d-4e56-a02a-d9cb6a5c4c1f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.170019] env[61974]: DEBUG nova.compute.provider_tree [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.240866] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 597.672756] env[61974]: DEBUG nova.scheduler.client.report [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.774799] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.938598] env[61974]: ERROR nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. [ 597.938598] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.938598] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.938598] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.938598] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.938598] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.938598] env[61974]: ERROR nova.compute.manager raise self.value [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.938598] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.938598] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.938598] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.939237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.939237] env[61974]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.939237] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. [ 597.939237] env[61974]: ERROR nova.compute.manager [ 597.939237] env[61974]: Traceback (most recent call last): [ 597.939237] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.939237] env[61974]: listener.cb(fileno) [ 597.939237] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.939237] env[61974]: result = function(*args, **kwargs) [ 597.939237] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.939237] env[61974]: return func(*args, **kwargs) [ 597.939237] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.939237] env[61974]: raise e [ 597.939237] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.939237] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 597.939237] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.939237] env[61974]: created_port_ids = self._update_ports_for_instance( [ 597.939237] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.939237] env[61974]: with excutils.save_and_reraise_exception(): [ 597.939237] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.939237] env[61974]: self.force_reraise() [ 597.939237] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.939237] env[61974]: raise self.value [ 597.939237] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.939237] env[61974]: updated_port = self._update_port( [ 597.939237] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.939237] env[61974]: _ensure_no_port_binding_failure(port) [ 597.939237] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.939237] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.939963] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. [ 597.939963] env[61974]: Removing descriptor: 15 [ 597.941307] env[61974]: ERROR nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. 
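The PortBindingFailed above originates at nova/network/neutron.py:294, where _ensure_no_port_binding_failure() inspects the port returned by Neutron; the per-instance traceback for 811de811-d683-44b4-9a25-33923f235e3e continues below. As a rough standalone sketch of that check (simplified names, not the actual Nova source): Neutron marks a port it could not bind with binding:vif_type = 'binding_failed', and Nova converts that into the exception that then propagates up through _update_ports_for_instance() and _allocate_network_async().

    # Standalone sketch of the binding-failure check referenced above
    # (simplified; not the actual nova/network/neutron.py source).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # A port Neutron could not bind comes back with
        # binding:vif_type == 'binding_failed'; raising here aborts the
        # build instead of spawning a VM with an unusable NIC.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '1c9380de-b680-410d-a930-68db86f0cdbd',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)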
[ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] Traceback (most recent call last): [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] yield resources [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.driver.spawn(context, instance, image_meta, [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] vm_ref = self.build_virtual_machine(instance, [ 597.941307] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] for vif in network_info: [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self._sync_wrapper(fn, *args, **kwargs) [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.wait() [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self[:] = self._gt.wait() [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self._exit_event.wait() [ 597.941680] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.941680] env[61974]: ERROR 
nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] result = hub.switch() [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self.greenlet.switch() [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] result = function(*args, **kwargs) [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return func(*args, **kwargs) [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise e [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] nwinfo = self.network_api.allocate_for_instance( [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] created_port_ids = self._update_ports_for_instance( [ 597.942046] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] with excutils.save_and_reraise_exception(): [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.force_reraise() [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise self.value [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] updated_port = self._update_port( [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.942348] 
env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] _ensure_no_port_binding_failure(port) [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise exception.PortBindingFailed(port_id=port['id']) [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. [ 597.942348] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] [ 597.942713] env[61974]: INFO nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Terminating instance [ 597.957251] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.957939] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquired lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.957939] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.964045] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.992708] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.993010] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.993175] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.993448] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.993513] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.993673] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.994077] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.994201] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 597.994392] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b 
tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.994559] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.994732] env[61974]: DEBUG nova.virt.hardware [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.996359] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46f8ac5-aea9-4012-96cd-9d23dabfb6b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.005724] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad472e0d-cf6c-4170-a4c0-3c0afc9d9f9d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.161663] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_power_states {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.180357] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.181557] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 598.185929] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.866s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.187890] env[61974]: INFO nova.compute.claims [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.198225] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Successfully created port: 2b6b7bc2-7997-4166-8cc1-3d24a561b16e {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.565075] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.653900] env[61974]: DEBUG nova.compute.manager [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Received event network-changed-1c9380de-b680-410d-a930-68db86f0cdbd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 598.653900] env[61974]: DEBUG nova.compute.manager [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Refreshing instance network info cache due to event network-changed-1c9380de-b680-410d-a930-68db86f0cdbd. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 598.654072] env[61974]: DEBUG oslo_concurrency.lockutils [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] Acquiring lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.674850] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Getting list of instances from cluster (obj){ [ 598.674850] env[61974]: value = "domain-c8" [ 598.674850] env[61974]: _type = "ClusterComputeResource" [ 598.674850] env[61974]: } {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 598.675405] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49da3332-f5f1-4e81-9a65-240a66ad7248 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.686521] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Got total of 0 instances {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 598.686969] env[61974]: WARNING nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] While synchronizing instance power states, found 5 instances in the database and 0 instances on the hypervisor. [ 598.687160] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid 811de811-d683-44b4-9a25-33923f235e3e {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 598.687383] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid a054a98f-e7b4-422c-bd5b-4e478f4a94d2 {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 598.687545] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid b637fed5-951c-4e8e-95ae-410e1ec3ecc8 {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 598.687695] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid b15f2e60-9ea6-49ea-be71-6770d3f48e1d {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 598.687846] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid b534ef37-c3d0-478e-86a9-4794251a00a1 {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 598.691623] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "811de811-d683-44b4-9a25-33923f235e3e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.691891] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.692104] 
env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.692331] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.692499] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "b534ef37-c3d0-478e-86a9-4794251a00a1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.692688] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.692907] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 598.694522] env[61974]: DEBUG nova.compute.utils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.700693] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.706607] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.706607] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.894809] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquiring lock "43e272e2-9256-4535-882e-3954574d5485" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.894809] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "43e272e2-9256-4535-882e-3954574d5485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.983435] env[61974]: DEBUG nova.policy [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46edf1b98540482787341493766c2b7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04bd6ade09cb456a8dd0eb8b2b34a955', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.057609] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.207029] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.225694] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.398228] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 599.423987] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bea8f3-db96-4697-ac8b-171fae748511 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.434025] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065c591b-a0af-4f33-96d4-2c4c997c51b8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.478497] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a4c76b-c7b0-4056-89da-8ae3d1fac687 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.486731] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce8a8ec-881f-4eed-9864-f86582a8e424 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.503026] env[61974]: DEBUG nova.compute.provider_tree [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.569322] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Releasing lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.569322] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 599.569322] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 599.569322] env[61974]: DEBUG oslo_concurrency.lockutils [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] Acquired lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.569322] env[61974]: DEBUG nova.network.neutron [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Refreshing network info cache for port 1c9380de-b680-410d-a930-68db86f0cdbd {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.569476] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3fc1aa8-f5d8-4152-8e41-2cbb31df590f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.583855] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2298e032-e096-47a1-8379-ca9e2f5aed35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.607807] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 811de811-d683-44b4-9a25-33923f235e3e could not be found. [ 599.608047] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 599.608442] env[61974]: INFO nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 599.608869] env[61974]: DEBUG oslo.service.loopingcall [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.608981] env[61974]: DEBUG nova.compute.manager [-] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.609077] env[61974]: DEBUG nova.network.neutron [-] [instance: 811de811-d683-44b4-9a25-33923f235e3e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.722744] env[61974]: DEBUG nova.network.neutron [-] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.929320] env[61974]: ERROR nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. [ 599.929320] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.929320] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.929320] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.929320] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.929320] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.929320] env[61974]: ERROR nova.compute.manager raise self.value [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.929320] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 599.929320] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.929320] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 599.929998] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.929998] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 599.929998] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. 
[ 599.929998] env[61974]: ERROR nova.compute.manager [ 599.929998] env[61974]: Traceback (most recent call last): [ 599.929998] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 599.929998] env[61974]: listener.cb(fileno) [ 599.929998] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.929998] env[61974]: result = function(*args, **kwargs) [ 599.929998] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 599.929998] env[61974]: return func(*args, **kwargs) [ 599.929998] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.929998] env[61974]: raise e [ 599.929998] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.929998] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 599.929998] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.929998] env[61974]: created_port_ids = self._update_ports_for_instance( [ 599.929998] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.929998] env[61974]: with excutils.save_and_reraise_exception(): [ 599.929998] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.929998] env[61974]: self.force_reraise() [ 599.929998] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.929998] env[61974]: raise self.value [ 599.929998] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.929998] env[61974]: updated_port = self._update_port( [ 599.929998] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.929998] env[61974]: _ensure_no_port_binding_failure(port) [ 599.929998] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.929998] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 599.931019] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. [ 599.931019] env[61974]: Removing descriptor: 16 [ 599.931019] env[61974]: ERROR nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. 
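Both tracebacks above end in nova/network/neutron.py line 294, where _ensure_no_port_binding_failure() turns a port whose binding failed into a PortBindingFailed exception that aborts the instance build. A minimal, self-contained sketch of that check follows; the PortBindingFailed class and the binding:vif_type == "binding_failed" test are simplified assumptions inferred from the traceback, not the Nova source.

    # Sketch only: approximates the check at nova/network/neutron.py:294 shown in the
    # traceback above. The "binding_failed" vif_type value is an assumption.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # A port that Neutron could not bind to any host must abort the build.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port["id"])

    port = {"id": "6f5a8b7d-3eac-472e-9d90-a9da5e2597eb",
            "binding:vif_type": "binding_failed"}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message as the error logged above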
[ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Traceback (most recent call last): [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] yield resources [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.driver.spawn(context, instance, image_meta, [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 599.931019] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] vm_ref = self.build_virtual_machine(instance, [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] for vif in network_info: [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self._sync_wrapper(fn, *args, **kwargs) [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.wait() [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self[:] = self._gt.wait() [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self._exit_event.wait() [ 599.931532] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.931867] env[61974]: ERROR 
nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] result = hub.switch() [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self.greenlet.switch() [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] result = function(*args, **kwargs) [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return func(*args, **kwargs) [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise e [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] nwinfo = self.network_api.allocate_for_instance( [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.931867] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] created_port_ids = self._update_ports_for_instance( [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] with excutils.save_and_reraise_exception(): [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.force_reraise() [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise self.value [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] updated_port = self._update_port( [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.932249] 
env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] _ensure_no_port_binding_failure(port) [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.932249] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise exception.PortBindingFailed(port_id=port['id']) [ 599.932527] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. [ 599.932527] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] [ 599.932527] env[61974]: INFO nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Terminating instance [ 599.936032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquiring lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.936032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquired lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.936032] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.944691] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.009057] env[61974]: DEBUG nova.scheduler.client.report [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.165431] env[61974]: DEBUG nova.network.neutron [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 
service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.218662] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.226237] env[61974]: DEBUG nova.network.neutron [-] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.252193] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.252465] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.252617] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.252795] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.252942] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.253605] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.253605] env[61974]: DEBUG nova.virt.hardware [None 
req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.253773] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.253940] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.254228] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.255118] env[61974]: DEBUG nova.virt.hardware [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.256897] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad1a1f9-2d88-42a8-854e-0a252b7fd312 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.268405] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baed3a9-dfe3-47f9-a7a6-c44b3329d293 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.297874] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Successfully created port: 7e9e80d1-55d8-43af-bbba-f654b8b19a26 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.441511] env[61974]: DEBUG nova.network.neutron [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.479654] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.516983] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.331s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.517542] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.520480] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.519s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.521932] env[61974]: INFO nova.compute.claims [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.684665] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.730234] env[61974]: INFO nova.compute.manager [-] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Took 1.12 seconds to deallocate network for instance. 
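The Acquiring lock / acquired / released ":: held N.NNNs" DEBUG lines in this section come from oslo.concurrency's lockutils wrapper around the "compute_resources" critical section of the resource tracker. The sketch below reproduces that pattern with the public lockutils.synchronized decorator; the ResourceTracker class here is a hypothetical stand-in, and Nova's own wrapper around lockutils may differ in detail.

    # Sketch only: a hypothetical method guarded the way the resource tracker's
    # instance_claim/abort_instance_claim calls appear to be in the log above.
    from oslo_concurrency import lockutils

    class ResourceTracker:
        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid):
            # Runs with the in-process "compute_resources" lock held, so concurrent
            # builds on this host serialize their claims; with debug logging enabled,
            # lockutils emits the acquire/release lines seen above.
            return f"claimed resources for {instance_uuid}"

    print(ResourceTracker().instance_claim('722b2d1a-9b8a-4120-85ce-c15cddd46479'))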
[ 600.732934] env[61974]: DEBUG nova.compute.claims [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 600.733171] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.801964] env[61974]: DEBUG nova.compute.manager [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Received event network-changed-4a5a632c-ff7c-4a1e-b413-6abfb920fb9a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.802375] env[61974]: DEBUG nova.compute.manager [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Refreshing instance network info cache due to event network-changed-4a5a632c-ff7c-4a1e-b413-6abfb920fb9a. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 600.802475] env[61974]: DEBUG oslo_concurrency.lockutils [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] Acquiring lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.802567] env[61974]: DEBUG oslo_concurrency.lockutils [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] Acquired lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.802756] env[61974]: DEBUG nova.network.neutron [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Refreshing network info cache for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.945667] env[61974]: DEBUG oslo_concurrency.lockutils [req-04419429-7e7f-4a24-a13d-fe73733e1cd6 req-4c606124-4dd1-49b1-a12d-24597bcc5e69 service nova] Releasing lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.026413] env[61974]: DEBUG nova.compute.utils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.031860] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.031860] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.192226] env[61974]: DEBUG nova.policy [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b3ba065bd61480b80da599700176b2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f21ff142189c48f9856c664e8a7042ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.195818] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Releasing lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.195947] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 601.197115] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 601.197115] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62524b7b-4848-4de2-afb0-e41e278b2cd9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.210064] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dce7f2-aa26-4786-8987-53cd59719e07 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.239717] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a054a98f-e7b4-422c-bd5b-4e478f4a94d2 could not be found. 
[ 601.239717] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.239717] env[61974]: INFO nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 601.240282] env[61974]: DEBUG oslo.service.loopingcall [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.240646] env[61974]: DEBUG nova.compute.manager [-] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.240767] env[61974]: DEBUG nova.network.neutron [-] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.395136] env[61974]: DEBUG nova.network.neutron [-] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.414253] env[61974]: DEBUG nova.network.neutron [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.485218] env[61974]: DEBUG nova.compute.manager [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Received event network-vif-deleted-1c9380de-b680-410d-a930-68db86f0cdbd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 601.485218] env[61974]: DEBUG nova.compute.manager [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Received event network-changed-6f5a8b7d-3eac-472e-9d90-a9da5e2597eb {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 601.485365] env[61974]: DEBUG nova.compute.manager [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Refreshing instance network info cache due to event network-changed-6f5a8b7d-3eac-472e-9d90-a9da5e2597eb. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 601.485602] env[61974]: DEBUG oslo_concurrency.lockutils [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] Acquiring lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.485747] env[61974]: DEBUG oslo_concurrency.lockutils [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] Acquired lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.485960] env[61974]: DEBUG nova.network.neutron [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Refreshing network info cache for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.531518] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 601.600398] env[61974]: ERROR nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. 
[ 601.600398] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.600398] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.600398] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.600398] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.600398] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.600398] env[61974]: ERROR nova.compute.manager raise self.value [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.600398] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 601.600398] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.600398] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 601.601740] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.601740] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 601.601740] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. 
[ 601.601740] env[61974]: ERROR nova.compute.manager [ 601.601740] env[61974]: Traceback (most recent call last): [ 601.601740] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 601.601740] env[61974]: listener.cb(fileno) [ 601.601740] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.601740] env[61974]: result = function(*args, **kwargs) [ 601.601740] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.601740] env[61974]: return func(*args, **kwargs) [ 601.601740] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.601740] env[61974]: raise e [ 601.601740] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.601740] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 601.601740] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.601740] env[61974]: created_port_ids = self._update_ports_for_instance( [ 601.601740] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.601740] env[61974]: with excutils.save_and_reraise_exception(): [ 601.601740] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.601740] env[61974]: self.force_reraise() [ 601.601740] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.601740] env[61974]: raise self.value [ 601.601740] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.601740] env[61974]: updated_port = self._update_port( [ 601.601740] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.601740] env[61974]: _ensure_no_port_binding_failure(port) [ 601.601740] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.601740] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 601.602770] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. [ 601.602770] env[61974]: Removing descriptor: 17 [ 601.602770] env[61974]: ERROR nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. 
[ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Traceback (most recent call last): [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] yield resources [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.driver.spawn(context, instance, image_meta, [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.602770] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] vm_ref = self.build_virtual_machine(instance, [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] for vif in network_info: [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self._sync_wrapper(fn, *args, **kwargs) [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.wait() [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self[:] = self._gt.wait() [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self._exit_event.wait() [ 601.604840] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.605474] env[61974]: ERROR 
nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] result = hub.switch() [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self.greenlet.switch() [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] result = function(*args, **kwargs) [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return func(*args, **kwargs) [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise e [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] nwinfo = self.network_api.allocate_for_instance( [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 601.605474] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] created_port_ids = self._update_ports_for_instance( [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] with excutils.save_and_reraise_exception(): [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.force_reraise() [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise self.value [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] updated_port = self._update_port( [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.605951] 
env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] _ensure_no_port_binding_failure(port) [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.605951] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise exception.PortBindingFailed(port_id=port['id']) [ 601.606290] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. [ 601.606290] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] [ 601.606290] env[61974]: INFO nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Terminating instance [ 601.606393] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquiring lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.752620] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89a2448-d02d-4a9a-96ea-aff4f00d2aac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.762601] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8274c35a-f578-4662-91c0-6c54d6e46345 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.799992] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4501c41a-2cbe-495f-8a5a-ee1d378725b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.808144] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812f88f4-ce5b-48f1-a828-e881db5d8b35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.821717] env[61974]: DEBUG nova.compute.provider_tree [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.900675] env[61974]: DEBUG nova.network.neutron [-] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.016096] env[61974]: DEBUG nova.network.neutron [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 602.055247] env[61974]: DEBUG nova.network.neutron [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.238469] env[61974]: DEBUG nova.network.neutron [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.325389] env[61974]: DEBUG nova.scheduler.client.report [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.401505] env[61974]: INFO nova.compute.manager [-] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Took 1.16 seconds to deallocate network for instance. [ 602.405262] env[61974]: DEBUG nova.compute.claims [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 602.405262] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.522605] env[61974]: DEBUG oslo_concurrency.lockutils [req-be09790a-82b5-4e6e-b849-d2dd97a09a12 req-0ce74567-3616-407a-a7d3-2f2fbb8b2f7b service nova] Releasing lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.522605] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquired lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.522605] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.550446] env[61974]: DEBUG nova.compute.manager [None 
req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.588496] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.588954] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.589309] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 602.589627] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.590166] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.590521] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 602.590848] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 602.591200] env[61974]: 
DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.591485] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.591863] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.593374] env[61974]: DEBUG nova.virt.hardware [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.593374] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88665e13-f5d2-4d5f-bf44-de8620591153 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.605169] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104cf928-d958-4053-b4df-4166716370c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.741218] env[61974]: DEBUG oslo_concurrency.lockutils [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] Releasing lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.741303] env[61974]: DEBUG nova.compute.manager [req-5b02185c-4e6d-4b4a-b8f1-4861d1dad2fd req-c944e72b-c337-4153-8bb8-d54e7cfc2779 service nova] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Received event network-vif-deleted-6f5a8b7d-3eac-472e-9d90-a9da5e2597eb {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.835990] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.838301] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 602.844847] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.065s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.844847] env[61974]: INFO nova.compute.claims [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.080067] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.111199] env[61974]: DEBUG nova.compute.manager [req-c9ebaf01-92e5-46f9-9034-cd2f706c1129 req-e82afb59-a8d4-4959-ad2c-1022f723a9aa service nova] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Received event network-vif-deleted-4a5a632c-ff7c-4a1e-b413-6abfb920fb9a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.308441] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.338112] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Successfully created port: e4782adc-0b1b-4be5-a669-6d3dc3ac92d8 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.349831] env[61974]: DEBUG nova.compute.utils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.354293] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.354806] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.590041] env[61974]: DEBUG nova.policy [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c666fd4e0f0427490d4ef7ad9669b08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9daaa8311a694a1fbe03f08e32832298', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 603.812000] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Releasing lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.816019] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 603.816019] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.816019] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-964572fa-5389-4094-af3d-297786917802 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.824647] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8ad1a6-cc47-4558-a707-4cf25d758402 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.851086] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b637fed5-951c-4e8e-95ae-410e1ec3ecc8 could not be found. 
[ 603.851435] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.852144] env[61974]: INFO nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 603.852144] env[61974]: DEBUG oslo.service.loopingcall [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.852427] env[61974]: DEBUG nova.compute.manager [-] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.852898] env[61974]: DEBUG nova.network.neutron [-] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.855018] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.029312] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5792154a-6655-4ea9-86cc-3d34070a9870 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.039414] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80349811-300c-4715-b972-2a50af6b9999 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.078466] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574bbed1-8833-4f4d-952c-2d12230fe0c0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.087580] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4720512e-a132-4769-b0e5-c3c6a918cd3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.102861] env[61974]: DEBUG nova.compute.provider_tree [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.128283] env[61974]: DEBUG nova.network.neutron [-] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.425044] env[61974]: ERROR nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. [ 604.425044] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.425044] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 604.425044] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 604.425044] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.425044] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.425044] env[61974]: ERROR nova.compute.manager raise self.value [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 604.425044] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 604.425044] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.425044] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 604.425872] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.425872] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 604.425872] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. 
[ 604.425872] env[61974]: ERROR nova.compute.manager [ 604.425872] env[61974]: Traceback (most recent call last): [ 604.425872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 604.425872] env[61974]: listener.cb(fileno) [ 604.425872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.425872] env[61974]: result = function(*args, **kwargs) [ 604.425872] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 604.425872] env[61974]: return func(*args, **kwargs) [ 604.425872] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.425872] env[61974]: raise e [ 604.425872] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.425872] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 604.425872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 604.425872] env[61974]: created_port_ids = self._update_ports_for_instance( [ 604.425872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 604.425872] env[61974]: with excutils.save_and_reraise_exception(): [ 604.425872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.425872] env[61974]: self.force_reraise() [ 604.425872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.425872] env[61974]: raise self.value [ 604.425872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 604.425872] env[61974]: updated_port = self._update_port( [ 604.425872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.425872] env[61974]: _ensure_no_port_binding_failure(port) [ 604.425872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.425872] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 604.427169] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. [ 604.427169] env[61974]: Removing descriptor: 18 [ 604.427169] env[61974]: ERROR nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. 
[ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Traceback (most recent call last): [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] yield resources [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.driver.spawn(context, instance, image_meta, [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.427169] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] vm_ref = self.build_virtual_machine(instance, [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] for vif in network_info: [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self._sync_wrapper(fn, *args, **kwargs) [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.wait() [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self[:] = self._gt.wait() [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self._exit_event.wait() [ 604.427472] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.427808] env[61974]: ERROR 
nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] result = hub.switch() [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self.greenlet.switch() [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] result = function(*args, **kwargs) [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return func(*args, **kwargs) [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise e [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] nwinfo = self.network_api.allocate_for_instance( [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 604.427808] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] created_port_ids = self._update_ports_for_instance( [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] with excutils.save_and_reraise_exception(): [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.force_reraise() [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise self.value [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] updated_port = self._update_port( [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.428130] 
env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] _ensure_no_port_binding_failure(port) [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.428130] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise exception.PortBindingFailed(port_id=port['id']) [ 604.428414] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. [ 604.428414] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] [ 604.428414] env[61974]: INFO nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Terminating instance [ 604.430739] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.430934] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquired lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.431177] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.607942] env[61974]: DEBUG nova.scheduler.client.report [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.633210] env[61974]: DEBUG nova.network.neutron [-] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.868940] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 604.907737] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.907959] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.908130] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.908308] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.908447] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.908590] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.908796] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.909055] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.909717] env[61974]: DEBUG nova.virt.hardware [None 
req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.912335] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.913122] env[61974]: DEBUG nova.virt.hardware [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.915387] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d5d8f7-df14-4b8c-a59b-424d5d2d7551 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.931780] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11257cb2-6c70-4a9f-a3a2-ed0fef392378 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.030653] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.119019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.119019] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.122141] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.895s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.124127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.124127] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 605.124127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.178s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.125289] env[61974]: INFO nova.compute.claims [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.129025] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddb654b-78e5-43bf-9703-09c36134227d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.140639] env[61974]: INFO nova.compute.manager [-] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Took 1.29 seconds to deallocate network for instance. 
[ 605.145304] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50471559-1432-4d92-b15c-26e812957872 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.152157] env[61974]: DEBUG nova.compute.claims [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 605.153295] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.166327] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec138be6-0956-4271-9b9d-603ae70dd1c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.173667] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a85267-232d-4639-be3c-198adb46229c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.209796] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181454MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 605.209971] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.403588] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquiring lock "2c4e7c81-67fd-4c5f-9e96-7256c26b228a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.403820] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "2c4e7c81-67fd-4c5f-9e96-7256c26b228a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.407601] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] 
Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.483735] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Successfully created port: b24ecf01-24e9-4274-a294-0789bdbed06d {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.631097] env[61974]: DEBUG nova.compute.utils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.632061] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 605.632242] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.898685] env[61974]: DEBUG nova.policy [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46edf1b98540482787341493766c2b7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '04bd6ade09cb456a8dd0eb8b2b34a955', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 605.906629] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.913877] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Releasing lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.914286] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.914565] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.914940] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30dfca93-4dc5-46b6-aa3d-db948e253f10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.932296] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6755b414-e1e9-44cf-9e16-67aab319512d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.965114] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b15f2e60-9ea6-49ea-be71-6770d3f48e1d could not be found. [ 605.965683] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.966041] env[61974]: INFO nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 605.966645] env[61974]: DEBUG oslo.service.loopingcall [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.966723] env[61974]: DEBUG nova.compute.manager [-] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.966851] env[61974]: DEBUG nova.network.neutron [-] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.044120] env[61974]: DEBUG nova.network.neutron [-] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.135977] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 606.313045] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abbe516-b703-40a9-a1c1-f41fc49093bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.323204] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1bc819-2238-48d8-9dfe-e766b438fbf8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.366767] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf0f761-fcc7-4611-a8ba-6ab0c0abe8cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.372377] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e0e4f7-0a92-4e1b-b2d1-bda446005b5a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.388857] env[61974]: DEBUG nova.compute.provider_tree [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.436755] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.548125] env[61974]: DEBUG nova.network.neutron [-] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.795926] env[61974]: DEBUG nova.compute.manager [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Received event network-changed-2b6b7bc2-7997-4166-8cc1-3d24a561b16e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.796204] env[61974]: DEBUG nova.compute.manager [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Refreshing instance network info cache due to event network-changed-2b6b7bc2-7997-4166-8cc1-3d24a561b16e. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 606.796430] env[61974]: DEBUG oslo_concurrency.lockutils [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] Acquiring lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.796570] env[61974]: DEBUG oslo_concurrency.lockutils [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] Acquired lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.796726] env[61974]: DEBUG nova.network.neutron [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Refreshing network info cache for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.893580] env[61974]: DEBUG nova.scheduler.client.report [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.046559] env[61974]: ERROR nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. 
[ 607.046559] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.046559] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.046559] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.046559] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.046559] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.046559] env[61974]: ERROR nova.compute.manager raise self.value [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.046559] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.046559] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.046559] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.047018] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.047018] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.047018] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. 
[ 607.047018] env[61974]: ERROR nova.compute.manager [ 607.047018] env[61974]: Traceback (most recent call last): [ 607.047018] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.047018] env[61974]: listener.cb(fileno) [ 607.047018] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.047018] env[61974]: result = function(*args, **kwargs) [ 607.047018] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.047018] env[61974]: return func(*args, **kwargs) [ 607.047018] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.047018] env[61974]: raise e [ 607.047018] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.047018] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 607.047018] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.047018] env[61974]: created_port_ids = self._update_ports_for_instance( [ 607.047018] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.047018] env[61974]: with excutils.save_and_reraise_exception(): [ 607.047018] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.047018] env[61974]: self.force_reraise() [ 607.047018] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.047018] env[61974]: raise self.value [ 607.047018] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.047018] env[61974]: updated_port = self._update_port( [ 607.047018] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.047018] env[61974]: _ensure_no_port_binding_failure(port) [ 607.047018] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.047018] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.047839] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. [ 607.047839] env[61974]: Removing descriptor: 20 [ 607.047839] env[61974]: ERROR nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. 
[ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Traceback (most recent call last): [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] yield resources [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.driver.spawn(context, instance, image_meta, [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.047839] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] vm_ref = self.build_virtual_machine(instance, [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] for vif in network_info: [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self._sync_wrapper(fn, *args, **kwargs) [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.wait() [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self[:] = self._gt.wait() [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self._exit_event.wait() [ 607.048405] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.048795] env[61974]: ERROR 
nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] result = hub.switch() [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self.greenlet.switch() [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] result = function(*args, **kwargs) [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return func(*args, **kwargs) [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise e [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] nwinfo = self.network_api.allocate_for_instance( [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.048795] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] created_port_ids = self._update_ports_for_instance( [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] with excutils.save_and_reraise_exception(): [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.force_reraise() [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise self.value [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] updated_port = self._update_port( [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.049153] 
env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] _ensure_no_port_binding_failure(port) [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.049153] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise exception.PortBindingFailed(port_id=port['id']) [ 607.049470] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. [ 607.049470] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] [ 607.049470] env[61974]: INFO nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Terminating instance [ 607.050919] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.051085] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquired lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.051637] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.056183] env[61974]: INFO nova.compute.manager [-] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Took 1.09 seconds to deallocate network for instance. 
[ 607.057341] env[61974]: DEBUG nova.compute.claims [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.057341] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.137148] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Successfully created port: fb14e838-6ca0-4225-902c-d9ca7d0ce70a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.153819] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.186014] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 607.186255] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.186550] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 607.186761] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.186929] env[61974]: DEBUG nova.virt.hardware [None 
req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 607.187280] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 607.187526] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 607.187682] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 607.187854] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 607.188085] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 607.188304] env[61974]: DEBUG nova.virt.hardware [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 607.189401] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b431de-56c6-4b78-b9d2-e420bb1b6e6d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.203933] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061eeed6-3ce3-4d9b-891a-b3cf2277b3f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.352230] env[61974]: DEBUG nova.network.neutron [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.404099] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.404604] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 607.419486] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.686s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.428727] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquiring lock "287496bf-b981-41d5-81fc-791d793c244e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.428978] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "287496bf-b981-41d5-81fc-791d793c244e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.619230] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.726546] env[61974]: DEBUG nova.network.neutron [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.917060] env[61974]: DEBUG nova.compute.utils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 607.917804] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 607.918095] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.102781] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.150934] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017115c9-9e96-4283-9c46-e12bdec26f7c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.162082] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af1734a-9694-448f-95a9-59523b6f312f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.201635] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd3d004-cb81-42c4-b3cf-ae4242af485f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.206200] env[61974]: DEBUG nova.policy [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3112ea075a4f432a8bacc33472a16441', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27f574589b5645bc8a95dd0263be342e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 608.216610] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c0c3d07b-8cbc-4b95-a5f1-ee7fa39540fc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.234254] env[61974]: DEBUG oslo_concurrency.lockutils [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] Releasing lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.234338] env[61974]: DEBUG nova.compute.manager [req-04bee4f5-9dff-4665-90a8-6fee415ab3dc req-7ff4f1b2-330d-4d59-b181-664812e87720 service nova] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Received event network-vif-deleted-2b6b7bc2-7997-4166-8cc1-3d24a561b16e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.234883] env[61974]: DEBUG nova.compute.provider_tree [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.237583] env[61974]: ERROR nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. [ 608.237583] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.237583] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.237583] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.237583] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.237583] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.237583] env[61974]: ERROR nova.compute.manager raise self.value [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.237583] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 608.237583] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.237583] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 608.240767] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.240767] env[61974]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 608.240767] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. [ 608.240767] env[61974]: ERROR nova.compute.manager [ 608.240767] env[61974]: Traceback (most recent call last): [ 608.240767] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 608.240767] env[61974]: listener.cb(fileno) [ 608.240767] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.240767] env[61974]: result = function(*args, **kwargs) [ 608.240767] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.240767] env[61974]: return func(*args, **kwargs) [ 608.240767] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.240767] env[61974]: raise e [ 608.240767] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.240767] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 608.240767] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.240767] env[61974]: created_port_ids = self._update_ports_for_instance( [ 608.240767] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.240767] env[61974]: with excutils.save_and_reraise_exception(): [ 608.240767] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.240767] env[61974]: self.force_reraise() [ 608.240767] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.240767] env[61974]: raise self.value [ 608.240767] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.240767] env[61974]: updated_port = self._update_port( [ 608.240767] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.240767] env[61974]: _ensure_no_port_binding_failure(port) [ 608.240767] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.240767] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 608.241729] env[61974]: nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. [ 608.241729] env[61974]: Removing descriptor: 15 [ 608.241729] env[61974]: ERROR nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. 
[ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Traceback (most recent call last): [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] yield resources [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.driver.spawn(context, instance, image_meta, [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.241729] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] vm_ref = self.build_virtual_machine(instance, [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] for vif in network_info: [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self._sync_wrapper(fn, *args, **kwargs) [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.wait() [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self[:] = self._gt.wait() [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self._exit_event.wait() [ 608.242176] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.242595] env[61974]: ERROR 
nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] result = hub.switch() [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self.greenlet.switch() [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] result = function(*args, **kwargs) [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return func(*args, **kwargs) [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise e [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] nwinfo = self.network_api.allocate_for_instance( [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 608.242595] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] created_port_ids = self._update_ports_for_instance( [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] with excutils.save_and_reraise_exception(): [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.force_reraise() [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise self.value [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] updated_port = self._update_port( [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.243537] 
env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] _ensure_no_port_binding_failure(port) [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.243537] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise exception.PortBindingFailed(port_id=port['id']) [ 608.243955] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. [ 608.243955] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] [ 608.243955] env[61974]: INFO nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Terminating instance [ 608.243955] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquiring lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.243955] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquired lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.243955] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.422193] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 608.609925] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Releasing lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.610520] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 608.610742] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 608.611175] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b92cff5-2214-4978-a7ee-f6e9b9882861 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.620961] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8881eb06-6ed8-41e9-b410-795e9ae6964e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.651563] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b534ef37-c3d0-478e-86a9-4794251a00a1 could not be found. [ 608.651977] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.652037] env[61974]: INFO nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.653384] env[61974]: DEBUG oslo.service.loopingcall [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.656018] env[61974]: DEBUG nova.compute.manager [-] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.656176] env[61974]: DEBUG nova.network.neutron [-] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.710768] env[61974]: DEBUG nova.network.neutron [-] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.743796] env[61974]: DEBUG nova.scheduler.client.report [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.798756] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.961082] env[61974]: DEBUG nova.compute.manager [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Received event network-changed-e4782adc-0b1b-4be5-a669-6d3dc3ac92d8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.961272] env[61974]: DEBUG nova.compute.manager [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Refreshing instance network info cache due to event network-changed-e4782adc-0b1b-4be5-a669-6d3dc3ac92d8. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 608.961451] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] Acquiring lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.119142] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.163347] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquiring lock "62413031-5c7a-498a-9aee-5d9015ef1574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.163798] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "62413031-5c7a-498a-9aee-5d9015ef1574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.213837] env[61974]: DEBUG nova.network.neutron [-] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.249178] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.830s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.249945] env[61974]: ERROR nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. 
[ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] Traceback (most recent call last): [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.driver.spawn(context, instance, image_meta, [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] vm_ref = self.build_virtual_machine(instance, [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 609.249945] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] for vif in network_info: [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self._sync_wrapper(fn, *args, **kwargs) [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.wait() [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self[:] = self._gt.wait() [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self._exit_event.wait() [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] result = hub.switch() [ 609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
609.250423] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return self.greenlet.switch() [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] result = function(*args, **kwargs) [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] return func(*args, **kwargs) [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise e [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] nwinfo = self.network_api.allocate_for_instance( [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] created_port_ids = self._update_ports_for_instance( [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] with excutils.save_and_reraise_exception(): [ 609.250735] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] self.force_reraise() [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise self.value [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] updated_port = self._update_port( [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] _ensure_no_port_binding_failure(port) [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] raise exception.PortBindingFailed(port_id=port['id']) [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] nova.exception.PortBindingFailed: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. [ 609.251053] env[61974]: ERROR nova.compute.manager [instance: 811de811-d683-44b4-9a25-33923f235e3e] [ 609.251317] env[61974]: DEBUG nova.compute.utils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 609.253299] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.848s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.258515] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Build of instance 811de811-d683-44b4-9a25-33923f235e3e was re-scheduled: Binding failed for port 1c9380de-b680-410d-a930-68db86f0cdbd, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 609.259742] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 609.260071] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.260806] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquired lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.261045] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.435757] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 609.469558] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.470603] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.470850] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.471120] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.471518] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.471518] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.471786] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.471982] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.472325] env[61974]: DEBUG nova.virt.hardware [None 
req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.472554] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.472816] env[61974]: DEBUG nova.virt.hardware [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.473769] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ea0193-b3a2-43e7-bc4f-23a39241d15e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.484501] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2def904-7e03-4183-80f6-3036c335129e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.622094] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Releasing lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.622563] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 609.622800] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.623137] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] Acquired lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.623321] env[61974]: DEBUG nova.network.neutron [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Refreshing network info cache for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.624454] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0653165-d7c4-4fa1-b1f8-9948a2936e10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.634331] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15ddb77-0978-41a4-b0c2-4022504b0cb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.660386] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88c3e670-b97a-4797-8821-cc24d2d07115 could not be found. [ 609.660655] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.662831] env[61974]: INFO nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Took 0.04 seconds to destroy the instance on the hypervisor. [ 609.662831] env[61974]: DEBUG oslo.service.loopingcall [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.662831] env[61974]: DEBUG nova.compute.manager [-] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.662831] env[61974]: DEBUG nova.network.neutron [-] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.705739] env[61974]: DEBUG nova.network.neutron [-] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.716939] env[61974]: INFO nova.compute.manager [-] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Took 1.06 seconds to deallocate network for instance. [ 609.725145] env[61974]: DEBUG nova.compute.claims [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 609.725145] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.738023] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Successfully created port: 07d90eee-3f40-45bf-8041-27052da77ef2 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.795504] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.986879] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9bb80f-cdb6-4200-a671-e22ffe0d138e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.991836] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.001399] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1cbdbd-5fee-44b1-beb6-45bf30135873 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.034485] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74226adb-7616-47eb-b816-e4e418d69ec3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.042631] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e130426e-b448-4499-a4d7-02c209915115 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.056386] env[61974]: DEBUG nova.compute.provider_tree [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.174040] env[61974]: DEBUG nova.network.neutron [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.212719] env[61974]: DEBUG nova.network.neutron [-] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.500179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Releasing lock "refresh_cache-811de811-d683-44b4-9a25-33923f235e3e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.500421] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 610.500576] env[61974]: DEBUG nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 610.500748] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 610.543878] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.560663] env[61974]: DEBUG nova.scheduler.client.report [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.716386] env[61974]: INFO nova.compute.manager [-] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Took 1.05 seconds to deallocate network for instance. 
[ 610.719368] env[61974]: DEBUG nova.compute.claims [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.719549] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.720723] env[61974]: DEBUG nova.network.neutron [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.817161] env[61974]: ERROR nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. [ 610.817161] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.817161] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.817161] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.817161] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.817161] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.817161] env[61974]: ERROR nova.compute.manager raise self.value [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.817161] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 610.817161] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.817161] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 610.817663] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.817663] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 610.817663] env[61974]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. [ 610.817663] env[61974]: ERROR nova.compute.manager [ 610.817663] env[61974]: Traceback (most recent call last): [ 610.817663] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 610.817663] env[61974]: listener.cb(fileno) [ 610.817663] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.817663] env[61974]: result = function(*args, **kwargs) [ 610.817663] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 610.817663] env[61974]: return func(*args, **kwargs) [ 610.817663] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.817663] env[61974]: raise e [ 610.817663] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.817663] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 610.817663] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.817663] env[61974]: created_port_ids = self._update_ports_for_instance( [ 610.817663] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.817663] env[61974]: with excutils.save_and_reraise_exception(): [ 610.817663] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.817663] env[61974]: self.force_reraise() [ 610.817663] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.817663] env[61974]: raise self.value [ 610.817663] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.817663] env[61974]: updated_port = self._update_port( [ 610.817663] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.817663] env[61974]: _ensure_no_port_binding_failure(port) [ 610.817663] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.817663] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 610.819511] env[61974]: nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. [ 610.819511] env[61974]: Removing descriptor: 17 [ 610.819511] env[61974]: ERROR nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. 
[ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Traceback (most recent call last): [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] yield resources [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.driver.spawn(context, instance, image_meta, [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.819511] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] vm_ref = self.build_virtual_machine(instance, [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] for vif in network_info: [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self._sync_wrapper(fn, *args, **kwargs) [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.wait() [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self[:] = self._gt.wait() [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self._exit_event.wait() [ 610.819831] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 610.820156] env[61974]: ERROR 
nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] result = hub.switch() [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self.greenlet.switch() [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] result = function(*args, **kwargs) [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return func(*args, **kwargs) [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise e [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] nwinfo = self.network_api.allocate_for_instance( [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.820156] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] created_port_ids = self._update_ports_for_instance( [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] with excutils.save_and_reraise_exception(): [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.force_reraise() [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise self.value [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] updated_port = self._update_port( [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.820524] 
env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] _ensure_no_port_binding_failure(port) [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.820524] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise exception.PortBindingFailed(port_id=port['id']) [ 610.820889] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. [ 610.820889] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] [ 610.820889] env[61974]: INFO nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Terminating instance [ 610.824238] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquiring lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.824238] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquired lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.824238] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.021523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquiring lock "622aca09-aab9-4e93-b4d3-621d33df7903" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.021523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "622aca09-aab9-4e93-b4d3-621d33df7903" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.048080] env[61974]: DEBUG nova.network.neutron [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Updating instance_info_cache with network_info: [] {{(pid=61974) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.069554] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.816s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.070360] env[61974]: ERROR nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Traceback (most recent call last): [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.driver.spawn(context, instance, image_meta, [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] vm_ref = self.build_virtual_machine(instance, [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.070360] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] for vif in network_info: [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self._sync_wrapper(fn, *args, **kwargs) [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.wait() [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 611.070712] env[61974]: ERROR 
nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self[:] = self._gt.wait() [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self._exit_event.wait() [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] result = hub.switch() [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 611.070712] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return self.greenlet.switch() [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] result = function(*args, **kwargs) [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] return func(*args, **kwargs) [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise e [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] nwinfo = self.network_api.allocate_for_instance( [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] created_port_ids = self._update_ports_for_instance( [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] with excutils.save_and_reraise_exception(): [ 611.071093] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] self.force_reraise() [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise self.value [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] updated_port = self._update_port( [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] _ensure_no_port_binding_failure(port) [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] raise exception.PortBindingFailed(port_id=port['id']) [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] nova.exception.PortBindingFailed: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. [ 611.071521] env[61974]: ERROR nova.compute.manager [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] [ 611.074073] env[61974]: DEBUG nova.compute.utils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 611.075452] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.923s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.079421] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Build of instance a054a98f-e7b4-422c-bd5b-4e478f4a94d2 was re-scheduled: Binding failed for port 6f5a8b7d-3eac-472e-9d90-a9da5e2597eb, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 611.079880] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 611.081425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquiring lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.081425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Acquired lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.081568] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.084011] env[61974]: DEBUG nova.compute.manager [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Received event network-changed-7e9e80d1-55d8-43af-bbba-f654b8b19a26 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 611.085832] env[61974]: DEBUG nova.compute.manager [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Refreshing instance network info cache due to event network-changed-7e9e80d1-55d8-43af-bbba-f654b8b19a26. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 611.085832] env[61974]: DEBUG oslo_concurrency.lockutils [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] Acquiring lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.085832] env[61974]: DEBUG oslo_concurrency.lockutils [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] Acquired lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.085832] env[61974]: DEBUG nova.network.neutron [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Refreshing network info cache for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.218859] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.219444] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.223950] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed68f05e-dc22-491c-9c58-355973067259 req-ce5d756b-0396-43fd-922e-85d9401f1be2 service nova] Releasing lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.376693] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.551236] env[61974]: INFO nova.compute.manager [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 811de811-d683-44b4-9a25-33923f235e3e] Took 1.05 seconds to deallocate network for instance. 
[ 611.626918] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.628747] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.653588] env[61974]: DEBUG nova.network.neutron [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.784274] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.863801] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38433985-1f44-44bc-bad3-da5416299222 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.873472] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0c2f29-4e21-40fc-80bd-679a74bbe6d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.912154] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28cfc4a-8424-4f8f-a9ce-300011f89bc1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.921341] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b567e0e7-e290-4365-8028-3b3df71465b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.936680] env[61974]: DEBUG nova.compute.provider_tree [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.051077] env[61974]: DEBUG nova.network.neutron [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.131325] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Releasing lock 
"refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.132367] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 612.132671] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.132869] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-365abe65-610b-4d55-9ad1-28e4daf7cc39 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.142949] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc10d75-1860-4304-b736-31e835a25257 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.170861] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 722b2d1a-9b8a-4120-85ce-c15cddd46479 could not be found. [ 612.171145] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.171368] env[61974]: INFO nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Took 0.04 seconds to destroy the instance on the hypervisor. [ 612.171735] env[61974]: DEBUG oslo.service.loopingcall [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.171963] env[61974]: DEBUG nova.compute.manager [-] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 612.172491] env[61974]: DEBUG nova.network.neutron [-] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.225328] env[61974]: DEBUG nova.network.neutron [-] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.287984] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Releasing lock "refresh_cache-a054a98f-e7b4-422c-bd5b-4e478f4a94d2" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.289766] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 612.290020] env[61974]: DEBUG nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 612.290202] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.339229] env[61974]: ERROR nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. 
[ 612.339229] env[61974]: ERROR nova.compute.manager Traceback (most recent call last):
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 612.339229] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 612.339229] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 612.339229] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 612.339229] env[61974]: ERROR nova.compute.manager self.force_reraise()
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 612.339229] env[61974]: ERROR nova.compute.manager raise self.value
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 612.339229] env[61974]: ERROR nova.compute.manager updated_port = self._update_port(
[ 612.339229] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 612.339229] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 612.339752] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 612.339752] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 612.339752] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information.
[ 612.339752] env[61974]: ERROR nova.compute.manager
[ 612.339752] env[61974]: Traceback (most recent call last):
[ 612.339752] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 612.339752] env[61974]: listener.cb(fileno)
[ 612.339752] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 612.339752] env[61974]: result = function(*args, **kwargs)
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 612.339752] env[61974]: return func(*args, **kwargs)
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 612.339752] env[61974]: raise e
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 612.339752] env[61974]: nwinfo = self.network_api.allocate_for_instance(
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 612.339752] env[61974]: created_port_ids = self._update_ports_for_instance(
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 612.339752] env[61974]: with excutils.save_and_reraise_exception():
[ 612.339752] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 612.339752] env[61974]: self.force_reraise()
[ 612.339752] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 612.339752] env[61974]: raise self.value
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 612.339752] env[61974]: updated_port = self._update_port(
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 612.339752] env[61974]: _ensure_no_port_binding_failure(port)
[ 612.339752] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 612.339752] env[61974]: raise exception.PortBindingFailed(port_id=port['id'])
[ 612.341283] env[61974]: nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information.
[ 612.341283] env[61974]: Removing descriptor: 18
[ 612.341283] env[61974]: ERROR nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information.
[ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Traceback (most recent call last): [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] yield resources [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.driver.spawn(context, instance, image_meta, [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.341283] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] vm_ref = self.build_virtual_machine(instance, [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] for vif in network_info: [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self._sync_wrapper(fn, *args, **kwargs) [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.wait() [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self[:] = self._gt.wait() [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self._exit_event.wait() [ 612.341644] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.342067] env[61974]: ERROR 
nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] result = hub.switch() [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self.greenlet.switch() [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] result = function(*args, **kwargs) [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return func(*args, **kwargs) [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise e [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] nwinfo = self.network_api.allocate_for_instance( [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.342067] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] created_port_ids = self._update_ports_for_instance( [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] with excutils.save_and_reraise_exception(): [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.force_reraise() [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise self.value [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] updated_port = self._update_port( [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.343849] 
env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] _ensure_no_port_binding_failure(port) [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.343849] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise exception.PortBindingFailed(port_id=port['id']) [ 612.344555] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. [ 612.344555] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] [ 612.344555] env[61974]: INFO nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Terminating instance [ 612.344555] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.345281] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.345281] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquired lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.349041] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.392130] env[61974]: DEBUG nova.compute.manager [req-e4c79298-0a1a-44c6-8edb-1cf27ddc573c req-98bcc8f7-698d-4d5d-be05-47f2b8b95ab0 service nova] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Received event network-vif-deleted-e4782adc-0b1b-4be5-a669-6d3dc3ac92d8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.439966] env[61974]: DEBUG nova.scheduler.client.report [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.555220] env[61974]: DEBUG oslo_concurrency.lockutils [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] Releasing lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.555220] env[61974]: DEBUG nova.compute.manager [req-f1425b52-89da-4b04-92f7-9db742053b67 req-aad69397-f958-401b-8731-0f34df955e19 service nova] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Received event network-vif-deleted-7e9e80d1-55d8-43af-bbba-f654b8b19a26 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.607387] env[61974]: INFO nova.scheduler.client.report [None req-d028c957-7034-4926-9ddc-51cee18a292c tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Deleted allocations for instance 811de811-d683-44b4-9a25-33923f235e3e [ 612.729358] env[61974]: DEBUG nova.network.neutron [-] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.849880] env[61974]: DEBUG nova.network.neutron [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.917651] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.949811] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.873s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.949811] env[61974]: ERROR nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. 
[ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Traceback (most recent call last): [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.driver.spawn(context, instance, image_meta, [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.949811] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] vm_ref = self.build_virtual_machine(instance, [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] for vif in network_info: [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self._sync_wrapper(fn, *args, **kwargs) [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.wait() [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self[:] = self._gt.wait() [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self._exit_event.wait() [ 612.950313] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] result = hub.switch() [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return self.greenlet.switch() [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] result = function(*args, **kwargs) [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] return func(*args, **kwargs) [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise e [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] nwinfo = self.network_api.allocate_for_instance( [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 612.950704] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] created_port_ids = self._update_ports_for_instance( [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] with excutils.save_and_reraise_exception(): [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] self.force_reraise() [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise self.value [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] updated_port = self._update_port( [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] _ensure_no_port_binding_failure(port) [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 612.951065] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] raise exception.PortBindingFailed(port_id=port['id']) [ 612.951356] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] nova.exception.PortBindingFailed: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. [ 612.951356] env[61974]: ERROR nova.compute.manager [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] [ 612.951356] env[61974]: DEBUG nova.compute.utils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.953199] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Build of instance b637fed5-951c-4e8e-95ae-410e1ec3ecc8 was re-scheduled: Binding failed for port 4a5a632c-ff7c-4a1e-b413-6abfb920fb9a, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 612.958188] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 612.958188] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquiring lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.958188] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Acquired lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.958341] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.962889] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.750s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.117304] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d028c957-7034-4926-9ddc-51cee18a292c 
tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "811de811-d683-44b4-9a25-33923f235e3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.202s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.120328] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "811de811-d683-44b4-9a25-33923f235e3e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.428s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.120328] env[61974]: INFO nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 811de811-d683-44b4-9a25-33923f235e3e] During sync_power_state the instance has a pending task (spawning). Skip. [ 613.120854] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "811de811-d683-44b4-9a25-33923f235e3e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.234824] env[61974]: INFO nova.compute.manager [-] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Took 1.06 seconds to deallocate network for instance. [ 613.236407] env[61974]: DEBUG nova.compute.claims [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 613.236407] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.363273] env[61974]: INFO nova.compute.manager [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] Took 1.07 seconds to deallocate network for instance. [ 613.608878] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.622894] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.626061] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 614.024044] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.115208] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Releasing lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.115208] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.115208] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.117863] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f8dbd14-380a-4e99-9927-6a74ad29bf11 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.130023] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b7254c-dbd6-420a-a377-6a2688649d48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.161532] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b3b4b80-9de4-4e59-b211-38bc8d9caeae could not be found. [ 614.161532] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.161532] env[61974]: INFO nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 614.161532] env[61974]: DEBUG oslo.service.loopingcall [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.161532] env[61974]: DEBUG nova.compute.manager [-] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.161532] env[61974]: DEBUG nova.network.neutron [-] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.163594] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.232436] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Received event network-changed-b24ecf01-24e9-4274-a294-0789bdbed06d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 614.233091] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Refreshing instance network info cache due to event network-changed-b24ecf01-24e9-4274-a294-0789bdbed06d. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 614.233091] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Acquiring lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.233269] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Acquired lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.233960] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Refreshing network info cache for port b24ecf01-24e9-4274-a294-0789bdbed06d {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.407224] env[61974]: INFO nova.scheduler.client.report [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Deleted allocations for instance a054a98f-e7b4-422c-bd5b-4e478f4a94d2 [ 614.418562] env[61974]: DEBUG nova.network.neutron [-] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.511383] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a054a98f-e7b4-422c-bd5b-4e478f4a94d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 614.526825] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Releasing lock "refresh_cache-b637fed5-951c-4e8e-95ae-410e1ec3ecc8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.528786] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 614.528786] env[61974]: DEBUG nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.528786] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.578200] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.800143] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.918716] env[61974]: ERROR nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. 
[ 614.918716] env[61974]: ERROR nova.compute.manager Traceback (most recent call last):
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 614.918716] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 614.918716] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 614.918716] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 614.918716] env[61974]: ERROR nova.compute.manager self.force_reraise()
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 614.918716] env[61974]: ERROR nova.compute.manager raise self.value
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 614.918716] env[61974]: ERROR nova.compute.manager updated_port = self._update_port(
[ 614.918716] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 614.918716] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 614.919205] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 614.919205] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 614.919205] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information.
[ 614.919205] env[61974]: ERROR nova.compute.manager
[ 614.919205] env[61974]: Traceback (most recent call last):
[ 614.919205] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 614.919205] env[61974]: listener.cb(fileno)
[ 614.919205] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 614.919205] env[61974]: result = function(*args, **kwargs)
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 614.919205] env[61974]: return func(*args, **kwargs)
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 614.919205] env[61974]: raise e
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 614.919205] env[61974]: nwinfo = self.network_api.allocate_for_instance(
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 614.919205] env[61974]: created_port_ids = self._update_ports_for_instance(
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 614.919205] env[61974]: with excutils.save_and_reraise_exception():
[ 614.919205] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 614.919205] env[61974]: self.force_reraise()
[ 614.919205] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 614.919205] env[61974]: raise self.value
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 614.919205] env[61974]: updated_port = self._update_port(
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 614.919205] env[61974]: _ensure_no_port_binding_failure(port)
[ 614.919205] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 614.919205] env[61974]: raise exception.PortBindingFailed(port_id=port['id'])
[ 614.919951] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information.
[ 614.919951] env[61974]: Removing descriptor: 20
[ 614.919951] env[61974]: DEBUG oslo_concurrency.lockutils [None req-24e3bdb5-68ee-4635-9c7e-c074d09140cb tempest-ServerExternalEventsTest-1842039609 tempest-ServerExternalEventsTest-1842039609-project-member] Lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.098s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 614.920077] env[61974]: ERROR nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information.
[ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] Traceback (most recent call last): [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] yield resources [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.driver.spawn(context, instance, image_meta, [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self._vmops.spawn(context, instance, image_meta, injected_files, [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] vm_ref = self.build_virtual_machine(instance, [ 614.920077] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] vif_infos = vmwarevif.get_vif_info(self._session, [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] for vif in network_info: [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self._sync_wrapper(fn, *args, **kwargs) [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.wait() [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self[:] = self._gt.wait() [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self._exit_event.wait() [ 614.920378] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 614.920378] env[61974]: ERROR 
nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] result = hub.switch() [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self.greenlet.switch() [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] result = function(*args, **kwargs) [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return func(*args, **kwargs) [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise e [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] nwinfo = self.network_api.allocate_for_instance( [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] created_port_ids = self._update_ports_for_instance( [ 614.920691] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] with excutils.save_and_reraise_exception(): [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.force_reraise() [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise self.value [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] updated_port = self._update_port( [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.921016] 
env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] _ensure_no_port_binding_failure(port) [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise exception.PortBindingFailed(port_id=port['id']) [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. [ 614.921016] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] [ 614.921339] env[61974]: INFO nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Terminating instance [ 614.923846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.231s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.923846] env[61974]: INFO nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a054a98f-e7b4-422c-bd5b-4e478f4a94d2] During sync_power_state the instance has a pending task (spawning). Skip. [ 614.923846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "a054a98f-e7b4-422c-bd5b-4e478f4a94d2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.923846] env[61974]: DEBUG nova.network.neutron [-] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.929081] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquiring lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.929081] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquired lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.929081] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.017278] env[61974]: DEBUG nova.compute.resource_tracker [None 
req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b637fed5-951c-4e8e-95ae-410e1ec3ecc8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.017445] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b15f2e60-9ea6-49ea-be71-6770d3f48e1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.017571] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b534ef37-c3d0-478e-86a9-4794251a00a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.017688] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 88c3e670-b97a-4797-8821-cc24d2d07115 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.017833] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 722b2d1a-9b8a-4120-85ce-c15cddd46479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.017907] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 9b3b4b80-9de4-4e59-b211-38bc8d9caeae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.018033] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 43e272e2-9256-4535-882e-3954574d5485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.084614] env[61974]: DEBUG nova.network.neutron [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.197612] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.427861] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 615.431272] env[61974]: INFO nova.compute.manager [-] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Took 1.27 seconds to deallocate network for instance. [ 615.435743] env[61974]: DEBUG nova.compute.claims [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.435743] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.469118] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.525458] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2c4e7c81-67fd-4c5f-9e96-7256c26b228a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.590327] env[61974]: INFO nova.compute.manager [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] Took 1.06 seconds to deallocate network for instance. 
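The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure(), which converts a Neutron port whose binding failed into nova.exception.PortBindingFailed. As a minimal sketch of that check (the traceback only shows the raise; the 'binding:vif_type' test is an assumption about how the failure is detected, not a quote of Nova's code):

    # Sketch, not Nova's verbatim code: turn a failed Neutron port binding into
    # PortBindingFailed, as seen in the traceback for instance 43e272e2-...5485.
    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # Assumed detail: Neutron marks a failed binding via 'binding:vif_type'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

When this fires inside _build_and_run_instance, the compute manager aborts the resource claim and terminates the instance, which is exactly the sequence the surrounding entries show for 43e272e2-9256-4535-882e-3954574d5485.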
[ 615.694753] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.699315] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Releasing lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.699556] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Received event network-vif-deleted-b24ecf01-24e9-4274-a294-0789bdbed06d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.699892] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Received event network-changed-fb14e838-6ca0-4225-902c-d9ca7d0ce70a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.699976] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Refreshing instance network info cache due to event network-changed-fb14e838-6ca0-4225-902c-d9ca7d0ce70a. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 615.700151] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Acquiring lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.700298] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Acquired lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.700548] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Refreshing network info cache for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.959310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.029143] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 287496bf-b981-41d5-81fc-791d793c244e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to 
start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.198684] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Releasing lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.201855] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 616.201855] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 616.201855] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4730231-bef4-4894-9ff3-873e1b6cb7a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.217015] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c70b43-b062-4b8d-8f1b-b43dff633129 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.240084] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 43e272e2-9256-4535-882e-3954574d5485 could not be found. [ 616.240503] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 616.240822] env[61974]: INFO nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Took 0.04 seconds to destroy the instance on the hypervisor. [ 616.241203] env[61974]: DEBUG oslo.service.loopingcall [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 616.241561] env[61974]: DEBUG nova.compute.manager [-] [instance: 43e272e2-9256-4535-882e-3954574d5485] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 616.244212] env[61974]: DEBUG nova.network.neutron [-] [instance: 43e272e2-9256-4535-882e-3954574d5485] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.428258] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.442504] env[61974]: DEBUG nova.network.neutron [-] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.531121] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 62413031-5c7a-498a-9aee-5d9015ef1574 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.626085] env[61974]: DEBUG nova.network.neutron [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.627218] env[61974]: INFO nova.scheduler.client.report [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Deleted allocations for instance b637fed5-951c-4e8e-95ae-410e1ec3ecc8 [ 616.946350] env[61974]: DEBUG nova.network.neutron [-] [instance: 43e272e2-9256-4535-882e-3954574d5485] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.035056] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 622aca09-aab9-4e93-b4d3-621d33df7903 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 617.102216] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquiring lock "ef64bb0a-d462-4218-9ddf-7c019727f2ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.103032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "ef64bb0a-d462-4218-9ddf-7c019727f2ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.133995] env[61974]: DEBUG oslo_concurrency.lockutils [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] Releasing lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.134224] env[61974]: DEBUG nova.compute.manager [req-159fdc25-76af-48a5-92b3-e84326ac5453 req-284cd93f-a405-4855-89d2-e52df1120a56 service nova] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Received event network-vif-deleted-fb14e838-6ca0-4225-902c-d9ca7d0ce70a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 617.138974] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5be19f04-0f8d-4be8-8bd7-791f011e7ee5 tempest-ServerDiagnosticsNegativeTest-1250126838 tempest-ServerDiagnosticsNegativeTest-1250126838-project-member] Lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.623s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.140436] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.448s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.141255] env[61974]: INFO nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b637fed5-951c-4e8e-95ae-410e1ec3ecc8] During sync_power_state the instance has a pending task (spawning). Skip. [ 617.141255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b637fed5-951c-4e8e-95ae-410e1ec3ecc8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.450612] env[61974]: INFO nova.compute.manager [-] [instance: 43e272e2-9256-4535-882e-3954574d5485] Took 1.21 seconds to deallocate network for instance. 
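The "Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns" lines around this point are emitted by oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in the entries above); the long waits on "compute_resources" (e.g. 18.448s) are greenthreads queuing for the resource tracker's critical section. A minimal, assumed usage pattern that produces these messages (not Nova's actual wrapper) looks like:

    # Sketch: declaring a critical section with oslo.concurrency. Time spent
    # waiting for the semaphore is logged as "waited"; time inside the body
    # is logged as "held".
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        ...  # critical section guarded by the "compute_resources" lock

Nova routes this through its own helpers, but the waited/held accounting seen in the log comes from this decorator's inner wrapper.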
[ 617.453176] env[61974]: DEBUG nova.compute.claims [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 617.453357] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.541046] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 41fccade-6e5f-4642-8889-2ce00dbff1c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 617.541046] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 617.541046] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 617.645153] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 617.780544] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdbfc27-87d5-4ac1-afe2-9fcac0cdaa74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.798389] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ba778e-8b0d-4333-b1fc-1d52191147ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.833303] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc6d1da-38b3-4f00-b2c9-68c5c2cd41ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.842403] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb88912-e375-4471-8b84-f49f76679642 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.859896] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.172415] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.364776] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.870176] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 618.870460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.910s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.870703] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.434s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.872204] env[61974]: INFO nova.compute.claims [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.879908] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.880313] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Getting list of instances from cluster (obj){ [ 618.880313] env[61974]: value = "domain-c8" [ 618.880313] env[61974]: _type = "ClusterComputeResource" [ 618.880313] env[61974]: } {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 618.882959] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08334a4b-3cc5-464e-a685-965997882247 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.897026] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Got total of 0 instances {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 619.311756] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.312550] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.312550] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 619.312550] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 619.544116] env[61974]: DEBUG nova.compute.manager [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Received event network-changed-07d90eee-3f40-45bf-8041-27052da77ef2 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 619.544319] env[61974]: DEBUG nova.compute.manager [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Refreshing instance network info cache due to event network-changed-07d90eee-3f40-45bf-8041-27052da77ef2. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 619.544528] env[61974]: DEBUG oslo_concurrency.lockutils [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] Acquiring lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.544666] env[61974]: DEBUG oslo_concurrency.lockutils [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] Acquired lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.544819] env[61974]: DEBUG nova.network.neutron [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Refreshing network info cache for port 07d90eee-3f40-45bf-8041-27052da77ef2 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.818301] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.818625] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.818625] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.818804] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.818980] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.818980] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 43e272e2-9256-4535-882e-3954574d5485] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.819120] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 619.819469] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Didn't find any instances for network info cache update. 
{{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 619.819998] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.820374] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.820750] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.821107] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.821395] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.921164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquiring lock "84448f61-d302-428f-b995-e942e27c39fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.921420] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "84448f61-d302-428f-b995-e942e27c39fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.074080] env[61974]: DEBUG nova.network.neutron [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.116968] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7099a2d-7727-4587-901d-10e20b78c241 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.125641] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f2f5a2-d8b8-4de4-909c-6d11283231d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.168074] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e703e664-ac51-4c47-9a78-ce558fbb9d97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.177199] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfb9a0a-70e9-4b37-99f0-3e1dc0113d51 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.192039] env[61974]: DEBUG nova.compute.provider_tree [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.219022] env[61974]: DEBUG nova.network.neutron [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.695356] env[61974]: DEBUG nova.scheduler.client.report [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.721426] env[61974]: DEBUG oslo_concurrency.lockutils [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] Releasing lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.721565] env[61974]: DEBUG nova.compute.manager [req-49bdf8f2-f680-4628-80b2-2493397ec211 req-41dbc45c-1d28-4de5-b946-9f7aed13ff99 service nova] [instance: 43e272e2-9256-4535-882e-3954574d5485] Received event network-vif-deleted-07d90eee-3f40-45bf-8041-27052da77ef2 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 620.892159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 
tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquiring lock "32b1f31b-1e2e-4f53-8e97-265f79a74899" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.892159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "32b1f31b-1e2e-4f53-8e97-265f79a74899" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.036749] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "270447a7-ebbf-4671-bc6c-522f23d21788" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.036749] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "270447a7-ebbf-4671-bc6c-522f23d21788" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.202334] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.331s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.202334] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 621.206468] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.149s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.715245] env[61974]: DEBUG nova.compute.utils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.721026] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 621.721026] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.811996] env[61974]: DEBUG nova.policy [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc93e89c2f5a4cf1922380da33aa3252', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3660988e8884462ba32af1ce67065f9a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 622.006352] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f333ad-0f16-476f-aa89-1e59e5b30161 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.013620] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601f3be3-06af-4739-86e5-a9f61acbd0d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.047111] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbbfdf3-7872-4acc-a385-5c627cc52f54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.055779] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7108206-8182-40fb-922d-c656f1aae448 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.072108] env[61974]: DEBUG 
nova.compute.provider_tree [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.220823] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 622.238402] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Successfully created port: 153aafe9-3dae-424f-aa21-50e0b8afbe7e {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.416544] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "1c7edeed-2fa7-4662-9994-21708dcb3efd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.418606] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "1c7edeed-2fa7-4662-9994-21708dcb3efd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.575336] env[61974]: DEBUG nova.scheduler.client.report [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.087018] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.878s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.087018] env[61974]: ERROR nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: 
b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Traceback (most recent call last): [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.driver.spawn(context, instance, image_meta, [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.087018] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] vm_ref = self.build_virtual_machine(instance, [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] for vif in network_info: [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self._sync_wrapper(fn, *args, **kwargs) [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.wait() [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self[:] = self._gt.wait() [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self._exit_event.wait() [ 623.087560] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] result = 
hub.switch() [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return self.greenlet.switch() [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] result = function(*args, **kwargs) [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] return func(*args, **kwargs) [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise e [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] nwinfo = self.network_api.allocate_for_instance( [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.087944] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] created_port_ids = self._update_ports_for_instance( [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] with excutils.save_and_reraise_exception(): [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] self.force_reraise() [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise self.value [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] updated_port = self._update_port( [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: 
b15f2e60-9ea6-49ea-be71-6770d3f48e1d] _ensure_no_port_binding_failure(port) [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.088333] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] raise exception.PortBindingFailed(port_id=port['id']) [ 623.088682] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] nova.exception.PortBindingFailed: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. [ 623.088682] env[61974]: ERROR nova.compute.manager [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] [ 623.088682] env[61974]: DEBUG nova.compute.utils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 623.088682] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.362s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.090625] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Build of instance b15f2e60-9ea6-49ea-be71-6770d3f48e1d was re-scheduled: Binding failed for port 2b6b7bc2-7997-4166-8cc1-3d24a561b16e, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 623.090934] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 623.092064] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.092241] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquired lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.092409] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.233112] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 623.268732] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 623.268992] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.269165] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.269353] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.269499] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.269646] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 623.269853] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 623.270079] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c 
tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 623.270353] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 623.270546] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 623.270869] env[61974]: DEBUG nova.virt.hardware [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 623.271612] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1548058-e503-4538-87b2-55e8998ef105 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.281341] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca7982f-b8fa-49c8-8323-1f6a4c449890 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.628681] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.736572] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.838424] env[61974]: ERROR nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. 
[ 623.838424] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.838424] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.838424] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.838424] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.838424] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.838424] env[61974]: ERROR nova.compute.manager raise self.value [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.838424] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 623.838424] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.838424] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 623.838977] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.838977] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 623.838977] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. 
[ 623.838977] env[61974]: ERROR nova.compute.manager [ 623.838977] env[61974]: Traceback (most recent call last): [ 623.838977] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 623.838977] env[61974]: listener.cb(fileno) [ 623.838977] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.838977] env[61974]: result = function(*args, **kwargs) [ 623.838977] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 623.838977] env[61974]: return func(*args, **kwargs) [ 623.838977] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.838977] env[61974]: raise e [ 623.838977] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.838977] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 623.838977] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.838977] env[61974]: created_port_ids = self._update_ports_for_instance( [ 623.838977] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.838977] env[61974]: with excutils.save_and_reraise_exception(): [ 623.838977] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.838977] env[61974]: self.force_reraise() [ 623.838977] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.838977] env[61974]: raise self.value [ 623.838977] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.838977] env[61974]: updated_port = self._update_port( [ 623.838977] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.838977] env[61974]: _ensure_no_port_binding_failure(port) [ 623.838977] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.838977] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 623.839798] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. [ 623.839798] env[61974]: Removing descriptor: 18 [ 623.839798] env[61974]: ERROR nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. 
[ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Traceback (most recent call last): [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] yield resources [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.driver.spawn(context, instance, image_meta, [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.839798] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] vm_ref = self.build_virtual_machine(instance, [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] for vif in network_info: [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self._sync_wrapper(fn, *args, **kwargs) [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.wait() [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self[:] = self._gt.wait() [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self._exit_event.wait() [ 623.840132] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.840534] env[61974]: ERROR 
nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] result = hub.switch() [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self.greenlet.switch() [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] result = function(*args, **kwargs) [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return func(*args, **kwargs) [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise e [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] nwinfo = self.network_api.allocate_for_instance( [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.840534] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] created_port_ids = self._update_ports_for_instance( [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] with excutils.save_and_reraise_exception(): [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.force_reraise() [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise self.value [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] updated_port = self._update_port( [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.840946] 
env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] _ensure_no_port_binding_failure(port) [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.840946] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise exception.PortBindingFailed(port_id=port['id']) [ 623.841312] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. [ 623.841312] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] [ 623.841312] env[61974]: INFO nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Terminating instance [ 623.846268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquiring lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.846435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquired lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.846601] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.872144] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9785c24e-ddce-4b0a-8cf7-21debb1aa59e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.878869] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a945781a-1fa6-49e1-a212-d642391099d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.912036] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba80900-8a8a-4a36-b18f-a3be8cfb85ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.921614] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c3592b-7edf-4605-afad-ad62195c26a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.936976] env[61974]: DEBUG nova.compute.provider_tree [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 
tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.241384] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Releasing lock "refresh_cache-b15f2e60-9ea6-49ea-be71-6770d3f48e1d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.241384] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 624.241384] env[61974]: DEBUG nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.241384] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.273310] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.375278] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.446365] env[61974]: DEBUG nova.scheduler.client.report [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.547037] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.740877] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "59c238dd-10f0-437c-a794-79bc87f05f2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.741060] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "59c238dd-10f0-437c-a794-79bc87f05f2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.783023] env[61974]: DEBUG nova.network.neutron [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.783320] env[61974]: DEBUG nova.compute.manager [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Received event network-changed-153aafe9-3dae-424f-aa21-50e0b8afbe7e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.783553] env[61974]: DEBUG nova.compute.manager [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Refreshing instance network info cache due to event network-changed-153aafe9-3dae-424f-aa21-50e0b8afbe7e. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 624.783663] env[61974]: DEBUG oslo_concurrency.lockutils [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] Acquiring lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.956235] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.866s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.956235] env[61974]: ERROR nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Traceback (most recent call last): [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.driver.spawn(context, instance, image_meta, [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.956235] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] vm_ref = self.build_virtual_machine(instance, [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] for vif in network_info: [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self._sync_wrapper(fn, *args, **kwargs) [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 
624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.wait() [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self[:] = self._gt.wait() [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self._exit_event.wait() [ 624.956759] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] result = hub.switch() [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return self.greenlet.switch() [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] result = function(*args, **kwargs) [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] return func(*args, **kwargs) [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise e [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] nwinfo = self.network_api.allocate_for_instance( [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 624.957932] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] created_port_ids = self._update_ports_for_instance( [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] with excutils.save_and_reraise_exception(): [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] self.force_reraise() [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise self.value [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] updated_port = self._update_port( [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] _ensure_no_port_binding_failure(port) [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 624.958679] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] raise exception.PortBindingFailed(port_id=port['id']) [ 624.959295] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] nova.exception.PortBindingFailed: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. [ 624.959295] env[61974]: ERROR nova.compute.manager [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] [ 624.959295] env[61974]: DEBUG nova.compute.utils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.959295] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.237s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.962028] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Build of instance b534ef37-c3d0-478e-86a9-4794251a00a1 was re-scheduled: Binding failed for port 7e9e80d1-55d8-43af-bbba-f654b8b19a26, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 624.962028] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 624.964532] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.964532] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquired lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.964532] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.997326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquiring lock "18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.997326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.053281] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Releasing lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.053598] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 625.053792] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 625.054432] env[61974]: DEBUG oslo_concurrency.lockutils [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] Acquired lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.055294] env[61974]: DEBUG nova.network.neutron [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Refreshing network info cache for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.056399] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f3776c3-0abc-42d8-9d77-8978ae5f10f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.070897] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba885a65-6a21-4493-a26a-79e4cf42bec7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.100256] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2c4e7c81-67fd-4c5f-9e96-7256c26b228a could not be found. [ 625.103873] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.103873] env[61974]: INFO nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 625.103873] env[61974]: DEBUG oslo.service.loopingcall [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.103873] env[61974]: DEBUG nova.compute.manager [-] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 625.103873] env[61974]: DEBUG nova.network.neutron [-] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.138241] env[61974]: DEBUG nova.network.neutron [-] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.250772] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "9d599717-0bda-4996-89d8-c41ce089eaac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.251054] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "9d599717-0bda-4996-89d8-c41ce089eaac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.285032] env[61974]: INFO nova.compute.manager [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] Took 1.04 seconds to deallocate network for instance. [ 625.495994] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.612779] env[61974]: DEBUG nova.network.neutron [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.644796] env[61974]: DEBUG nova.network.neutron [-] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.694165] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.803651] env[61974]: DEBUG nova.network.neutron [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.870508] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6007b699-88a7-4b99-a510-f1148ff92469 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.880820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d23f4bf-6d9b-4a4a-9f16-f20b85332038 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.915984] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce2882d-8ee8-4f62-af69-45f58e53d57e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.923522] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe8ef92-4141-460a-b574-e65e8976ce03 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.939088] env[61974]: DEBUG nova.compute.provider_tree [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.966821] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "8a00b16d-8274-4728-920b-a30e95fa4048" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.967088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "8a00b16d-8274-4728-920b-a30e95fa4048" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.148296] env[61974]: INFO 
nova.compute.manager [-] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Took 1.05 seconds to deallocate network for instance. [ 626.150156] env[61974]: DEBUG nova.compute.claims [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 626.150156] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.197737] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Releasing lock "refresh_cache-b534ef37-c3d0-478e-86a9-4794251a00a1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.198385] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 626.198642] env[61974]: DEBUG nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.198883] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.221852] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.308066] env[61974]: DEBUG oslo_concurrency.lockutils [req-f16b59cd-0904-4380-b1a4-6965aa8d5b26 req-ed7a14d5-466e-46ca-a9d8-606e5dc525db service nova] Releasing lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.359842] env[61974]: INFO nova.scheduler.client.report [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Deleted allocations for instance b15f2e60-9ea6-49ea-be71-6770d3f48e1d [ 626.442425] env[61974]: DEBUG nova.scheduler.client.report [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.727607] env[61974]: DEBUG nova.network.neutron [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.868617] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b7102c12-6224-4af8-8dfb-bc0904ca264b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.499s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.870225] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 28.178s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.870311] env[61974]: INFO nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b15f2e60-9ea6-49ea-be71-6770d3f48e1d] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 626.870505] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b15f2e60-9ea6-49ea-be71-6770d3f48e1d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.877956] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquiring lock "6f9690ab-8218-4b2c-ba36-682ea7398209" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.878639] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "6f9690ab-8218-4b2c-ba36-682ea7398209" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.888652] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "26cb158a-04fa-4031-b099-34dfe8a762cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.888760] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "26cb158a-04fa-4031-b099-34dfe8a762cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.950707] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.992s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.950707] env[61974]: ERROR nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. 
[ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Traceback (most recent call last): [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.driver.spawn(context, instance, image_meta, [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.950707] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] vm_ref = self.build_virtual_machine(instance, [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] for vif in network_info: [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self._sync_wrapper(fn, *args, **kwargs) [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.wait() [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self[:] = self._gt.wait() [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self._exit_event.wait() [ 626.951137] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] result = hub.switch() [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return self.greenlet.switch() [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] result = function(*args, **kwargs) [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] return func(*args, **kwargs) [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise e [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] nwinfo = self.network_api.allocate_for_instance( [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.951488] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] created_port_ids = self._update_ports_for_instance( [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] with excutils.save_and_reraise_exception(): [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] self.force_reraise() [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise self.value [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] updated_port = self._update_port( [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] _ensure_no_port_binding_failure(port) [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 626.951806] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] raise exception.PortBindingFailed(port_id=port['id']) [ 626.952669] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] nova.exception.PortBindingFailed: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. [ 626.952669] env[61974]: ERROR nova.compute.manager [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] [ 626.952669] env[61974]: DEBUG nova.compute.utils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 626.954123] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Build of instance 88c3e670-b97a-4797-8821-cc24d2d07115 was re-scheduled: Binding failed for port e4782adc-0b1b-4be5-a669-6d3dc3ac92d8, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 626.954563] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 626.954786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquiring lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.955741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Acquired lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.955741] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.956244] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.720s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.990891] env[61974]: DEBUG 
nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.095131] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.234459] env[61974]: INFO nova.compute.manager [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] Took 1.04 seconds to deallocate network for instance. [ 627.371770] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 627.551359] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "93a736b5-5423-4378-8b0c-73a0c46414ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.551612] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.597782] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Releasing lock "refresh_cache-88c3e670-b97a-4797-8821-cc24d2d07115" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.598227] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 627.598227] env[61974]: DEBUG nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.598379] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.620399] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.761421] env[61974]: DEBUG nova.compute.manager [req-a1f68b7c-45fc-4c35-9cfd-fbee626aa6b2 req-f2b400d4-09fd-42ad-a5d3-7fde8b3be309 service nova] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Received event network-vif-deleted-153aafe9-3dae-424f-aa21-50e0b8afbe7e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 627.800380] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "67ca9fb2-9ca0-4fca-956e-961d5011df35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.800705] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "67ca9fb2-9ca0-4fca-956e-961d5011df35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.863155] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce3bd62-924e-4981-b1b2-bdcd005d2f24 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.872517] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9690ce5e-b1e5-4feb-966c-0a066c052c48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.909732] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff356f11-c76d-43e4-8e1e-e42528809b6e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.918114] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db5a556-0c8e-40fb-b810-8d0288bc726e {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.935459] env[61974]: DEBUG nova.compute.provider_tree [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.940902] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.123312] env[61974]: DEBUG nova.network.neutron [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.282811] env[61974]: INFO nova.scheduler.client.report [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Deleted allocations for instance b534ef37-c3d0-478e-86a9-4794251a00a1 [ 628.443155] env[61974]: DEBUG nova.scheduler.client.report [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.626345] env[61974]: INFO nova.compute.manager [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] [instance: 88c3e670-b97a-4797-8821-cc24d2d07115] Took 1.03 seconds to deallocate network for instance. 
[ 628.797637] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6316ac1e-9fd2-4cb4-b9bd-4c5872c56608 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "b534ef37-c3d0-478e-86a9-4794251a00a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.596s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.799107] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b534ef37-c3d0-478e-86a9-4794251a00a1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 30.106s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.799234] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1444fe71-25d1-4e8e-92ed-1e42a7da959b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.817089] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539db837-bd01-4d0a-9b7d-0f2d9896ac49 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.952867] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.952867] env[61974]: ERROR nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. 
[ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Traceback (most recent call last): [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.driver.spawn(context, instance, image_meta, [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.952867] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] vm_ref = self.build_virtual_machine(instance, [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] for vif in network_info: [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self._sync_wrapper(fn, *args, **kwargs) [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.wait() [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self[:] = self._gt.wait() [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self._exit_event.wait() [ 628.953272] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] result = hub.switch() [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return self.greenlet.switch() [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] result = function(*args, **kwargs) [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] return func(*args, **kwargs) [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise e [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] nwinfo = self.network_api.allocate_for_instance( [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 628.953607] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] created_port_ids = self._update_ports_for_instance( [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] with excutils.save_and_reraise_exception(): [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] self.force_reraise() [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise self.value [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] updated_port = self._update_port( [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] _ensure_no_port_binding_failure(port) [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 628.954790] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] raise exception.PortBindingFailed(port_id=port['id']) [ 628.955135] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] nova.exception.PortBindingFailed: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. [ 628.955135] env[61974]: ERROR nova.compute.manager [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] [ 628.955135] env[61974]: DEBUG nova.compute.utils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.955135] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.791s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.958115] env[61974]: INFO nova.compute.claims [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.961056] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Build of instance 722b2d1a-9b8a-4120-85ce-c15cddd46479 was re-scheduled: Binding failed for port b24ecf01-24e9-4274-a294-0789bdbed06d, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.961056] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.961056] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquiring lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.961371] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Acquired lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.961520] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.310055] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 629.350203] env[61974]: INFO nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b534ef37-c3d0-478e-86a9-4794251a00a1] During the sync_power process the instance has moved from host None to host cpu-1 [ 629.350203] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "b534ef37-c3d0-478e-86a9-4794251a00a1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.490011] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.671023] env[61974]: INFO nova.scheduler.client.report [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Deleted allocations for instance 88c3e670-b97a-4797-8821-cc24d2d07115 [ 629.830871] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.834190] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.186164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-cdd47d8e-d062-4733-a65b-4eb2db17a10a tempest-AttachInterfacesUnderV243Test-1760290627 tempest-AttachInterfacesUnderV243Test-1760290627-project-member] Lock "88c3e670-b97a-4797-8821-cc24d2d07115" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.896s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.322139] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab1bfd1-a8f4-47fa-a99b-0fcfd18060c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.329548] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4a3e86-5718-4b90-9ad3-2fa9ac201f6e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.336328] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Releasing lock "refresh_cache-722b2d1a-9b8a-4120-85ce-c15cddd46479" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.337178] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 630.337986] env[61974]: DEBUG nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.337986] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 630.376140] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6194fe-ed44-4be3-8959-060b8b64fe77 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.385060] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6a9cf-6a7c-4aaf-8002-9257267c9241 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.403046] env[61974]: DEBUG nova.compute.provider_tree [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.420210] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.690788] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 630.906761] env[61974]: DEBUG nova.scheduler.client.report [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.927202] env[61974]: DEBUG nova.network.neutron [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.224142] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.414908] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.416031] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 631.418708] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.983s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.431019] env[61974]: INFO nova.compute.manager [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] [instance: 722b2d1a-9b8a-4120-85ce-c15cddd46479] Took 1.09 seconds to deallocate network for instance. 
[ 631.924480] env[61974]: DEBUG nova.compute.utils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.931322] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.933700] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.998167] env[61974]: DEBUG nova.policy [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a0f719eb1684bc89abe6ae6f978e47d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88aaa0f0090946569ea568c460b6da62', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 632.063206] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "ccc4d6d9-979a-468a-9b7a-4633662c4052" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.063543] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "ccc4d6d9-979a-468a-9b7a-4633662c4052" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.329326] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a2bffd-952c-4300-ac78-6ff299d5f555 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.340967] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe49d3a-3464-4226-bd85-b1854ea74686 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.379584] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e2d1c0-9593-45f3-8562-546382e8f2dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.389287] 
env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ae894e-ebc9-4e23-86b0-36b353bf36ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.404082] env[61974]: DEBUG nova.compute.provider_tree [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.418273] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Successfully created port: 270ec13f-ec53-4668-ad5d-a03e46ac6083 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.432011] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 632.458140] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "c3eb4869-0bde-4398-bf34-3ee6073174e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.458140] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c3eb4869-0bde-4398-bf34-3ee6073174e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.530657] env[61974]: INFO nova.scheduler.client.report [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] Deleted allocations for instance 722b2d1a-9b8a-4120-85ce-c15cddd46479 [ 632.908206] env[61974]: DEBUG nova.scheduler.client.report [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.040630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f6cf81-484f-42cd-b6f3-81479f4895d9 tempest-ServerPasswordTestJSON-725073603 tempest-ServerPasswordTestJSON-725073603-project-member] 
Lock "722b2d1a-9b8a-4120-85ce-c15cddd46479" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.075s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.415214] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.415944] env[61974]: ERROR nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Traceback (most recent call last): [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.driver.spawn(context, instance, image_meta, [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] vm_ref = self.build_virtual_machine(instance, [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.415944] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] for vif in network_info: [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self._sync_wrapper(fn, *args, **kwargs) [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.wait() [ 633.416368] env[61974]: 
ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self[:] = self._gt.wait() [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self._exit_event.wait() [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] result = hub.switch() [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.416368] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return self.greenlet.switch() [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] result = function(*args, **kwargs) [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] return func(*args, **kwargs) [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise e [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] nwinfo = self.network_api.allocate_for_instance( [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] created_port_ids = self._update_ports_for_instance( [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] with excutils.save_and_reraise_exception(): [ 633.416739] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] self.force_reraise() [ 633.417197] 
env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise self.value [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] updated_port = self._update_port( [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] _ensure_no_port_binding_failure(port) [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] raise exception.PortBindingFailed(port_id=port['id']) [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] nova.exception.PortBindingFailed: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. [ 633.417197] env[61974]: ERROR nova.compute.manager [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] [ 633.417517] env[61974]: DEBUG nova.compute.utils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.419163] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Build of instance 9b3b4b80-9de4-4e59-b211-38bc8d9caeae was re-scheduled: Binding failed for port fb14e838-6ca0-4225-902c-d9ca7d0ce70a, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 633.419619] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 633.420400] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquiring lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.420400] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Acquired lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.420400] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.421328] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.462s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.423739] env[61974]: INFO nova.compute.claims [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.454489] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 633.496228] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.496539] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.497012] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.497012] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.497617] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.497617] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.497736] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.498450] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.498741] env[61974]: DEBUG nova.virt.hardware [None 
req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.500345] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.500345] env[61974]: DEBUG nova.virt.hardware [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.500345] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c73ee0d-fdf0-499b-856a-24dccc666630 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.509121] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bda721d-b808-407e-918f-1edfd5d23da4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.544393] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 634.072460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.145160] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.212758] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquiring lock "6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.212758] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.433762] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.865490] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48274d9-f577-4eb4-82b2-31a3a66ebc82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.877042] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534ccd4d-856f-4f66-8961-19002c502e06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.914471] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19ebbb8-9687-4bd5-9f0e-96a3c89cb487 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.926310] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411012d4-48c1-41d4-8729-d47345129ab7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.942640] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Releasing lock "refresh_cache-9b3b4b80-9de4-4e59-b211-38bc8d9caeae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.943378] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 634.943453] env[61974]: DEBUG nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.943582] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.945481] env[61974]: DEBUG nova.compute.provider_tree [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.950918] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquiring lock "9f781418-6149-4c73-aaa0-20c8cbc8c482" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.951346] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "9f781418-6149-4c73-aaa0-20c8cbc8c482" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.982617] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.362804] env[61974]: DEBUG nova.compute.manager [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Received event network-changed-270ec13f-ec53-4668-ad5d-a03e46ac6083 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 635.363180] env[61974]: DEBUG nova.compute.manager [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Refreshing instance network info cache due to event network-changed-270ec13f-ec53-4668-ad5d-a03e46ac6083. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 635.363341] env[61974]: DEBUG oslo_concurrency.lockutils [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] Acquiring lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.363488] env[61974]: DEBUG oslo_concurrency.lockutils [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] Acquired lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.363638] env[61974]: DEBUG nova.network.neutron [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Refreshing network info cache for port 270ec13f-ec53-4668-ad5d-a03e46ac6083 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.452876] env[61974]: DEBUG nova.scheduler.client.report [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.485303] env[61974]: DEBUG nova.network.neutron [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.513263] env[61974]: ERROR nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. 
[ 635.513263] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.513263] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.513263] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.513263] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.513263] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.513263] env[61974]: ERROR nova.compute.manager raise self.value [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.513263] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 635.513263] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.513263] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 635.513776] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.513776] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 635.513776] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. 
[ 635.513776] env[61974]: ERROR nova.compute.manager [ 635.513776] env[61974]: Traceback (most recent call last): [ 635.513776] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 635.513776] env[61974]: listener.cb(fileno) [ 635.513776] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.513776] env[61974]: result = function(*args, **kwargs) [ 635.513776] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 635.513776] env[61974]: return func(*args, **kwargs) [ 635.513776] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.513776] env[61974]: raise e [ 635.513776] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.513776] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 635.513776] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.513776] env[61974]: created_port_ids = self._update_ports_for_instance( [ 635.513776] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.513776] env[61974]: with excutils.save_and_reraise_exception(): [ 635.513776] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.513776] env[61974]: self.force_reraise() [ 635.513776] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.513776] env[61974]: raise self.value [ 635.513776] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.513776] env[61974]: updated_port = self._update_port( [ 635.513776] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.513776] env[61974]: _ensure_no_port_binding_failure(port) [ 635.513776] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.513776] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 635.514791] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. [ 635.514791] env[61974]: Removing descriptor: 20 [ 635.514791] env[61974]: ERROR nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. 
[ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] Traceback (most recent call last): [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] yield resources [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.driver.spawn(context, instance, image_meta, [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.514791] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] vm_ref = self.build_virtual_machine(instance, [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] for vif in network_info: [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self._sync_wrapper(fn, *args, **kwargs) [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.wait() [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self[:] = self._gt.wait() [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self._exit_event.wait() [ 635.515194] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.515621] env[61974]: ERROR 
nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] result = hub.switch() [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self.greenlet.switch() [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] result = function(*args, **kwargs) [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return func(*args, **kwargs) [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise e [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] nwinfo = self.network_api.allocate_for_instance( [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.515621] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] created_port_ids = self._update_ports_for_instance( [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] with excutils.save_and_reraise_exception(): [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.force_reraise() [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise self.value [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] updated_port = self._update_port( [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.516019] 
env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] _ensure_no_port_binding_failure(port) [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.516019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise exception.PortBindingFailed(port_id=port['id']) [ 635.517149] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. [ 635.517149] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] [ 635.517149] env[61974]: INFO nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Terminating instance [ 635.519807] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquiring lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.891023] env[61974]: DEBUG nova.network.neutron [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.959012] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.959555] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 635.965376] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.510s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.990273] env[61974]: INFO nova.compute.manager [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] [instance: 9b3b4b80-9de4-4e59-b211-38bc8d9caeae] Took 1.05 seconds to deallocate network for instance. 
[ 636.044025] env[61974]: DEBUG nova.network.neutron [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.468890] env[61974]: DEBUG nova.compute.utils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.477080] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 636.477080] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.546592] env[61974]: DEBUG oslo_concurrency.lockutils [req-d68d4909-2942-4359-9410-c0df621d32a8 req-53d18257-04e2-4db8-9272-14748b5f746d service nova] Releasing lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.547507] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquired lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.547507] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.698760] env[61974]: DEBUG nova.policy [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98306e8a7e834885881d3f86a4588372', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e61302272f01468c975aa242b9222361', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 636.839348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquiring lock "9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.839348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.973592] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 636.977877] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de8042-0075-4fb6-ae6c-02d3a1ccd3c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.989098] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa977f96-49bc-4697-bd97-93d7ddae0cba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.040699] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c354798e-9e64-4320-b459-d8ad98a7b029 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.050779] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b127b6-7160-4946-b528-0e12000cc99c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.071391] env[61974]: DEBUG nova.compute.provider_tree [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.078019] env[61974]: INFO nova.scheduler.client.report [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Deleted allocations for instance 9b3b4b80-9de4-4e59-b211-38bc8d9caeae [ 637.103061] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.307186] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.490659] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Successfully created port: e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.584169] env[61974]: DEBUG nova.scheduler.client.report [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.587861] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e0debf0d-c070-43f9-9f23-b4a10698ce56 tempest-ServersAdminTestJSON-1960166960 tempest-ServersAdminTestJSON-1960166960-project-member] Lock "9b3b4b80-9de4-4e59-b211-38bc8d9caeae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.851s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.612969] env[61974]: DEBUG nova.compute.manager [req-da180869-2f0d-45a5-8827-2ff0ade4c89f req-d38468e8-21a2-403e-a2b0-37cd6422fada service nova] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Received event network-vif-deleted-270ec13f-ec53-4668-ad5d-a03e46ac6083 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 637.810812] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Releasing lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.810812] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 637.810993] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 637.811317] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29424adc-c4ae-44f3-a6d0-a08ee643a1ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.824234] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b117bf-c0b2-4655-a362-ffa43db44330 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.848676] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 287496bf-b981-41d5-81fc-791d793c244e could not be found. [ 637.850132] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 637.850132] env[61974]: INFO nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 637.850132] env[61974]: DEBUG oslo.service.loopingcall [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.850132] env[61974]: DEBUG nova.compute.manager [-] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 637.850132] env[61974]: DEBUG nova.network.neutron [-] [instance: 287496bf-b981-41d5-81fc-791d793c244e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 637.881188] env[61974]: DEBUG nova.network.neutron [-] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.991956] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 638.018042] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.018462] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.018570] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.018765] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.018898] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.019444] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.019803] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.020033] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 638.020220] 
env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.020390] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.020793] env[61974]: DEBUG nova.virt.hardware [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.021680] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8336c2-8bca-42ce-93ba-5e48a287975a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.032986] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a084655-b908-4a8d-a453-24bbca4b7183 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.089274] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.126s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.090349] env[61974]: ERROR nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. 
[ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] Traceback (most recent call last): [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.driver.spawn(context, instance, image_meta, [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] vm_ref = self.build_virtual_machine(instance, [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] vif_infos = vmwarevif.get_vif_info(self._session, [ 638.090349] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] for vif in network_info: [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self._sync_wrapper(fn, *args, **kwargs) [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.wait() [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self[:] = self._gt.wait() [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self._exit_event.wait() [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] result = hub.switch() [ 638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
638.090735] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return self.greenlet.switch() [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] result = function(*args, **kwargs) [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] return func(*args, **kwargs) [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise e [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] nwinfo = self.network_api.allocate_for_instance( [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] created_port_ids = self._update_ports_for_instance( [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] with excutils.save_and_reraise_exception(): [ 638.091156] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] self.force_reraise() [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise self.value [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] updated_port = self._update_port( [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] _ensure_no_port_binding_failure(port) [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] raise exception.PortBindingFailed(port_id=port['id']) [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] nova.exception.PortBindingFailed: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. [ 638.091798] env[61974]: ERROR nova.compute.manager [instance: 43e272e2-9256-4535-882e-3954574d5485] [ 638.092123] env[61974]: DEBUG nova.compute.utils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 638.092674] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.920s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.094410] env[61974]: INFO nova.compute.claims [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.098768] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Build of instance 43e272e2-9256-4535-882e-3954574d5485 was re-scheduled: Binding failed for port 07d90eee-3f40-45bf-8041-27052da77ef2, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 638.099247] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 638.099463] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquiring lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.099611] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Acquired lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.099771] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.104607] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 638.384040] env[61974]: DEBUG nova.network.neutron [-] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.645414] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.651937] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.888841] env[61974]: INFO nova.compute.manager [-] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Took 1.04 seconds to deallocate network for instance. 
[ 638.896473] env[61974]: DEBUG nova.compute.claims [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 638.896678] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.049231] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.495883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c7cc90-06ce-40ac-86bf-225a8d4e31fd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.503979] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0ed457-8775-4505-957b-759e0eb0b672 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.536713] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9b1995-3848-4c23-a2e2-12607ec31162 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.543640] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd692695-903d-4b16-a4a7-5bbc1c29d575 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.557579] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Releasing lock "refresh_cache-43e272e2-9256-4535-882e-3954574d5485" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.557579] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 639.557579] env[61974]: DEBUG nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 639.557579] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 639.559436] env[61974]: DEBUG nova.compute.provider_tree [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.600418] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.064626] env[61974]: DEBUG nova.scheduler.client.report [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.075418] env[61974]: DEBUG nova.compute.manager [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Received event network-changed-e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 640.076887] env[61974]: DEBUG nova.compute.manager [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Refreshing instance network info cache due to event network-changed-e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 640.076887] env[61974]: DEBUG oslo_concurrency.lockutils [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] Acquiring lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.076887] env[61974]: DEBUG oslo_concurrency.lockutils [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] Acquired lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.076887] env[61974]: DEBUG nova.network.neutron [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Refreshing network info cache for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.102720] env[61974]: DEBUG nova.network.neutron [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.311996] env[61974]: ERROR nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. 
[ 640.311996] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.311996] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 640.311996] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 640.311996] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.311996] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.311996] env[61974]: ERROR nova.compute.manager raise self.value [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 640.311996] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 640.311996] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.311996] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 640.312560] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.312560] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 640.312560] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. 
[ 640.312560] env[61974]: ERROR nova.compute.manager [ 640.312560] env[61974]: Traceback (most recent call last): [ 640.312560] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 640.312560] env[61974]: listener.cb(fileno) [ 640.312560] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.312560] env[61974]: result = function(*args, **kwargs) [ 640.312560] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 640.312560] env[61974]: return func(*args, **kwargs) [ 640.312560] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 640.312560] env[61974]: raise e [ 640.312560] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.312560] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 640.312560] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 640.312560] env[61974]: created_port_ids = self._update_ports_for_instance( [ 640.312560] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 640.312560] env[61974]: with excutils.save_and_reraise_exception(): [ 640.312560] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.312560] env[61974]: self.force_reraise() [ 640.312560] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.312560] env[61974]: raise self.value [ 640.312560] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 640.312560] env[61974]: updated_port = self._update_port( [ 640.312560] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.312560] env[61974]: _ensure_no_port_binding_failure(port) [ 640.312560] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.312560] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 640.313458] env[61974]: nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. [ 640.313458] env[61974]: Removing descriptor: 20 [ 640.313458] env[61974]: ERROR nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. 
[ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Traceback (most recent call last): [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] yield resources [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.driver.spawn(context, instance, image_meta, [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self._vmops.spawn(context, instance, image_meta, injected_files, [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 640.313458] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] vm_ref = self.build_virtual_machine(instance, [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] vif_infos = vmwarevif.get_vif_info(self._session, [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] for vif in network_info: [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self._sync_wrapper(fn, *args, **kwargs) [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.wait() [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self[:] = self._gt.wait() [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self._exit_event.wait() [ 640.313854] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 640.314340] env[61974]: ERROR 
nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] result = hub.switch() [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self.greenlet.switch() [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] result = function(*args, **kwargs) [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return func(*args, **kwargs) [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise e [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] nwinfo = self.network_api.allocate_for_instance( [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 640.314340] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] created_port_ids = self._update_ports_for_instance( [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] with excutils.save_and_reraise_exception(): [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.force_reraise() [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise self.value [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] updated_port = self._update_port( [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.314801] 
env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] _ensure_no_port_binding_failure(port) [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.314801] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise exception.PortBindingFailed(port_id=port['id']) [ 640.315866] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. [ 640.315866] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] [ 640.315866] env[61974]: INFO nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Terminating instance [ 640.318698] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquiring lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.572918] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.573486] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 640.576232] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.426s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.609018] env[61974]: DEBUG nova.network.neutron [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.611264] env[61974]: INFO nova.compute.manager [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] [instance: 43e272e2-9256-4535-882e-3954574d5485] Took 1.05 seconds to deallocate network for instance. 
[ 640.822403] env[61974]: DEBUG nova.network.neutron [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.082494] env[61974]: DEBUG nova.compute.utils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.083363] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 641.084987] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 641.221725] env[61974]: DEBUG nova.policy [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cd9d6180f3246a8a0f820bd160e441c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a3b081e49bc420c8488bcb8061bcc27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 641.325935] env[61974]: DEBUG oslo_concurrency.lockutils [req-7dd5f236-65ff-4347-aca3-48312ed012c2 req-1045ed83-b44e-41d5-8402-15d71f0578bc service nova] Releasing lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.326631] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquired lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.326820] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.558519] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef18a4b-2cca-4f99-bee3-03b67534b6d1 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.569995] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d70d153-a8f0-402a-80e4-0f49c59b964f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.608694] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 641.613071] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e077d0-f926-432a-8161-5a0ee0d25767 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.621546] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c5e0b5-e78a-4ac3-80bf-93daa382dae9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.643849] env[61974]: DEBUG nova.compute.provider_tree [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.673541] env[61974]: INFO nova.scheduler.client.report [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Deleted allocations for instance 43e272e2-9256-4535-882e-3954574d5485 [ 641.861562] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.990129] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Successfully created port: 05f49d80-b505-45bb-a0d6-b0a4d913c71e {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.148563] env[61974]: DEBUG nova.scheduler.client.report [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.160235] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.185979] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4192808a-bd05-4d67-b3ff-f657227acfe4 tempest-ImagesNegativeTestJSON-18517784 tempest-ImagesNegativeTestJSON-18517784-project-member] Lock "43e272e2-9256-4535-882e-3954574d5485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.292s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.622365] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 642.655118] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.078s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.655118] env[61974]: ERROR nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. 
[ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Traceback (most recent call last): [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.driver.spawn(context, instance, image_meta, [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.655118] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] vm_ref = self.build_virtual_machine(instance, [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] for vif in network_info: [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self._sync_wrapper(fn, *args, **kwargs) [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.wait() [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self[:] = self._gt.wait() [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self._exit_event.wait() [ 642.656066] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] result = hub.switch() [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return self.greenlet.switch() [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] result = function(*args, **kwargs) [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] return func(*args, **kwargs) [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise e [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] nwinfo = self.network_api.allocate_for_instance( [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.656524] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] created_port_ids = self._update_ports_for_instance( [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] with excutils.save_and_reraise_exception(): [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] self.force_reraise() [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise self.value [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] updated_port = self._update_port( [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] _ensure_no_port_binding_failure(port) [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 642.656897] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] raise exception.PortBindingFailed(port_id=port['id']) [ 642.657397] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] nova.exception.PortBindingFailed: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. [ 642.657397] env[61974]: ERROR nova.compute.manager [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] [ 642.657397] env[61974]: DEBUG nova.compute.utils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.661157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.720s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.662675] env[61974]: INFO nova.compute.claims [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.665432] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Build of instance 2c4e7c81-67fd-4c5f-9e96-7256c26b228a was re-scheduled: Binding failed for port 153aafe9-3dae-424f-aa21-50e0b8afbe7e, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 642.666698] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 642.667088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquiring lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.667276] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Acquired lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.667536] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.669443] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Releasing lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.669944] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 642.670186] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 642.673092] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e287f51-7015-4d39-af1e-dda7765310cb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.681239] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:54:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1390371125',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-16056222',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.682086] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.682357] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.686442] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.686442] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.686442] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.686442] env[61974]: 
DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.686442] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.686605] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.686605] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.686605] env[61974]: DEBUG nova.virt.hardware [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.686605] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3845ec83-8b1c-439e-86be-1b0fcb0b2d66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.695333] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 642.703435] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db9015e-86c6-4654-9e23-baf551798ba3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.725656] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4934d31-98dc-44e2-ab21-552ae7347e3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.739827] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62413031-5c7a-498a-9aee-5d9015ef1574 could not be found. 
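The nova.virt.hardware records above walk through CPU-topology selection for these 1-vCPU flavors: with no flavor or image limits the maxima fall back to 65536 for sockets, cores and threads, and the only layout whose product matches one vCPU is 1:1:1, which is why the log reports a single possible topology. Below is a minimal, self-contained Python sketch of that enumeration; the function name, signature and defaults are illustrative assumptions for this log, not the actual code in nova/virt/hardware.py.

from itertools import product

MAX_UNITS = 65536  # mirrors "limits were sockets=65536, cores=65536, threads=65536" above

def possible_topologies(vcpus, max_sockets=MAX_UNITS, max_cores=MAX_UNITS, max_threads=MAX_UNITS):
    # Enumerate every (sockets, cores, threads) triple whose product equals the
    # requested vCPU count and that stays within the per-dimension maxima.
    candidates = range(1, vcpus + 1)
    found = []
    for sockets, cores, threads in product(candidates, repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets and cores <= max_cores and threads <= max_threads):
            found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies" above
print(possible_topologies(4))  # six layouts, e.g. (1, 2, 2), (2, 2, 1), (4, 1, 1)

For a single vCPU the sorted "desired" list therefore collapses to VirtCPUTopology(cores=1,sockets=1,threads=1), exactly as the records above report.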
[ 642.740117] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 642.741310] env[61974]: INFO nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Took 0.07 seconds to destroy the instance on the hypervisor. [ 642.741310] env[61974]: DEBUG oslo.service.loopingcall [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 642.741460] env[61974]: DEBUG nova.compute.manager [-] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 642.741530] env[61974]: DEBUG nova.network.neutron [-] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 642.796237] env[61974]: DEBUG nova.network.neutron [-] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.808253] env[61974]: DEBUG nova.compute.manager [req-9231e2f5-8eff-4b72-97c1-0b9c06644b44 req-2666c40e-efa5-4887-b664-a664ac78cf54 service nova] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Received event network-vif-deleted-e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 643.225870] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.248292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.304463] env[61974]: DEBUG nova.network.neutron [-] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.515232] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.553061] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "99a03824-dd33-4916-84f7-4c911a98c9d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.553846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "99a03824-dd33-4916-84f7-4c911a98c9d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.592134] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.592361] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.675297] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "a333f129-6a86-4715-83e2-79543620d013" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.675297] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "a333f129-6a86-4715-83e2-79543620d013" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.810039] env[61974]: INFO nova.compute.manager [-] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Took 1.07 seconds to deallocate network for instance. [ 643.815969] env[61974]: DEBUG nova.compute.claims [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 643.816250] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.020917] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Releasing lock "refresh_cache-2c4e7c81-67fd-4c5f-9e96-7256c26b228a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.021193] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 644.021490] env[61974]: DEBUG nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.021540] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 644.065168] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.083401] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eb3584-5314-45ed-8abd-5b2e5580b130 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.095447] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ea06f4-99f1-432c-8e53-63ffbaa24ee8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.133294] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5a3ad9-690b-4927-8d75-9b24be5c8df3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.145822] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86a72fa-ec56-4d64-8a62-f3476231fdb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.160735] env[61974]: DEBUG nova.compute.provider_tree [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.573451] env[61974]: DEBUG nova.network.neutron [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.664440] env[61974]: DEBUG nova.scheduler.client.report [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 645.031741] env[61974]: DEBUG nova.compute.manager [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Received event network-changed-05f49d80-b505-45bb-a0d6-b0a4d913c71e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.031931] env[61974]: DEBUG nova.compute.manager [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Refreshing instance network info cache due to event network-changed-05f49d80-b505-45bb-a0d6-b0a4d913c71e. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 645.032164] env[61974]: DEBUG oslo_concurrency.lockutils [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] Acquiring lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.032303] env[61974]: DEBUG oslo_concurrency.lockutils [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] Acquired lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.032453] env[61974]: DEBUG nova.network.neutron [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Refreshing network info cache for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.075702] env[61974]: INFO nova.compute.manager [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] [instance: 2c4e7c81-67fd-4c5f-9e96-7256c26b228a] Took 1.05 seconds to deallocate network for instance. [ 645.114844] env[61974]: ERROR nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. 
[ 645.114844] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.114844] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.114844] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.114844] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.114844] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.114844] env[61974]: ERROR nova.compute.manager raise self.value [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.114844] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.114844] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.114844] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.115314] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.115314] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.115314] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. 
[ 645.115314] env[61974]: ERROR nova.compute.manager [ 645.115314] env[61974]: Traceback (most recent call last): [ 645.115314] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.115314] env[61974]: listener.cb(fileno) [ 645.115314] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.115314] env[61974]: result = function(*args, **kwargs) [ 645.115314] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.115314] env[61974]: return func(*args, **kwargs) [ 645.115314] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.115314] env[61974]: raise e [ 645.115314] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.115314] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 645.115314] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.115314] env[61974]: created_port_ids = self._update_ports_for_instance( [ 645.115314] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.115314] env[61974]: with excutils.save_and_reraise_exception(): [ 645.115314] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.115314] env[61974]: self.force_reraise() [ 645.115314] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.115314] env[61974]: raise self.value [ 645.115314] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.115314] env[61974]: updated_port = self._update_port( [ 645.115314] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.115314] env[61974]: _ensure_no_port_binding_failure(port) [ 645.115314] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.115314] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.116185] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. [ 645.116185] env[61974]: Removing descriptor: 20 [ 645.121450] env[61974]: ERROR nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. 
[ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Traceback (most recent call last): [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] yield resources [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.driver.spawn(context, instance, image_meta, [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] vm_ref = self.build_virtual_machine(instance, [ 645.121450] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] for vif in network_info: [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self._sync_wrapper(fn, *args, **kwargs) [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.wait() [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self[:] = self._gt.wait() [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self._exit_event.wait() [ 645.122159] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.122159] env[61974]: ERROR 
nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] result = hub.switch() [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self.greenlet.switch() [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] result = function(*args, **kwargs) [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return func(*args, **kwargs) [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise e [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] nwinfo = self.network_api.allocate_for_instance( [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] created_port_ids = self._update_ports_for_instance( [ 645.122673] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] with excutils.save_and_reraise_exception(): [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.force_reraise() [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise self.value [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] updated_port = self._update_port( [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.123204] 
env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] _ensure_no_port_binding_failure(port) [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise exception.PortBindingFailed(port_id=port['id']) [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. [ 645.123204] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] [ 645.123840] env[61974]: INFO nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Terminating instance [ 645.123840] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquiring lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.169776] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.171898] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 645.179021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.346s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.179021] env[61974]: INFO nova.compute.claims [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.575380] env[61974]: DEBUG nova.network.neutron [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.683855] env[61974]: DEBUG nova.compute.utils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.685321] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 645.804935] env[61974]: DEBUG nova.network.neutron [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.122472] env[61974]: INFO nova.scheduler.client.report [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Deleted allocations for instance 2c4e7c81-67fd-4c5f-9e96-7256c26b228a [ 646.190113] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 646.310123] env[61974]: DEBUG oslo_concurrency.lockutils [req-3775fb7e-c32a-433c-8f69-1d7dfeb95ce2 req-8e1f81c0-f558-4f6a-8af5-0cf437b1fcdf service nova] Releasing lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.310595] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquired lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.310807] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 646.576800] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquiring lock "7a465c7e-874d-4cd1-9c23-0ae249997114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.577143] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd 
tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "7a465c7e-874d-4cd1-9c23-0ae249997114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.628069] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f36a725-f1ee-4c08-9b30-9cff59a55d1d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.633071] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a890daba-debf-40ac-9219-d0d4cc86307c tempest-FloatingIPsAssociationNegativeTestJSON-1660602523 tempest-FloatingIPsAssociationNegativeTestJSON-1660602523-project-member] Lock "2c4e7c81-67fd-4c5f-9e96-7256c26b228a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.229s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.638820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86545604-8534-463c-897f-f83538769921 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.679856] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5507dd48-f51d-4f6d-9806-90d86e423f5c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.687179] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b876e0-907a-4dba-a7bd-9944591a3d78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.704365] env[61974]: DEBUG nova.compute.provider_tree [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.843955] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.095012] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.138982] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 647.207668] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 647.210736] env[61974]: DEBUG nova.scheduler.client.report [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.252256] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.253055] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.254552] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.254965] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.254965] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 647.255144] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.255403] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.255762] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.255835] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.256282] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.256364] env[61974]: DEBUG nova.virt.hardware [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.257620] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28178a2a-895b-43cc-ac32-a0e6ccf094a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.266824] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9502cc-ff37-4958-b868-e27a86a649c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.284749] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 647.294308] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.295519] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32d80785-2c6c-45d0-a2b9-24946a370a9a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.298706] env[61974]: DEBUG nova.compute.manager [req-667318ea-c3ae-4d94-a6f6-32c4941e0780 req-f32e1815-5594-47f7-9a1b-bc74ed94e0bb service nova] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Received event network-vif-deleted-05f49d80-b505-45bb-a0d6-b0a4d913c71e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.311131] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Created folder: OpenStack in parent group-v4. [ 647.311342] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating folder: Project (e57b53a6de1342d58cdd6431940ead2d). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.311580] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4774d6c-31ca-48e6-a59b-cd9e5524f632 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.320607] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Created folder: Project (e57b53a6de1342d58cdd6431940ead2d) in parent group-v292912. [ 647.320811] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating folder: Instances. Parent ref: group-v292913. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.321151] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6acee9f7-2c18-482f-9cea-bc92f9bcbc23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.330631] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Created folder: Instances in parent group-v292913. [ 647.330879] env[61974]: DEBUG oslo.service.loopingcall [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.331085] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 647.331296] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4eb3a9e-635d-4b19-bf68-95195ed06553 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.351345] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.351345] env[61974]: value = "task-1378885" [ 647.351345] env[61974]: _type = "Task" [ 647.351345] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.360096] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378885, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.602318] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Releasing lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.602318] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 647.602318] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.602661] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-090eaea7-888e-4c9f-975a-0f9e0d840cd4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.613799] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35b5990-5e57-408e-85ca-cc78ddb9eae8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.633647] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquiring lock "2e217cbc-4962-44c7-b054-b3ae135ef8bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.633751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "2e217cbc-4962-44c7-b054-b3ae135ef8bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.654712] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 622aca09-aab9-4e93-b4d3-621d33df7903 could not be found. [ 647.655311] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.655311] env[61974]: INFO nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Took 0.05 seconds to destroy the instance on the hypervisor. [ 647.655416] env[61974]: DEBUG oslo.service.loopingcall [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.657840] env[61974]: DEBUG nova.compute.manager [-] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 647.657840] env[61974]: DEBUG nova.network.neutron [-] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.678530] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.708222] env[61974]: DEBUG nova.network.neutron [-] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.722172] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.722172] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.723168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.499s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.724979] env[61974]: INFO nova.compute.claims [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.863420] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378885, 'name': CreateVM_Task, 'duration_secs': 0.313229} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.863854] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.867028] env[61974]: DEBUG oslo_vmware.service [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7380cd3a-6a8f-4561-a919-e2e3db2d4a25 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.875317] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.879279] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.879991] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 647.880387] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e6f3c6-bffe-4b4c-9739-e61a77a87d34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.887366] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 647.887366] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a0c803-8717-95c5-ff74-9f504ae6d7b8" [ 647.887366] env[61974]: _type = "Task" [ 647.887366] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.896788] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a0c803-8717-95c5-ff74-9f504ae6d7b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.210274] env[61974]: DEBUG nova.network.neutron [-] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.229525] env[61974]: DEBUG nova.compute.utils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.235929] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 648.236299] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.396018] env[61974]: DEBUG nova.policy [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3043c6656f9b40bdac3c7138447b8415', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f97689f25c4410083e17eceda464da5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 648.403676] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.403924] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.404168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.404314] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 
tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.404951] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.404951] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41bedb61-7081-4ff6-b0c8-6ff31897a1f1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.425232] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.425232] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 648.426481] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52225d91-4641-4752-ac56-5dfa01f51e4c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.433893] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54339e9f-d3a1-4718-afc2-3e63425959eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.439377] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 648.439377] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522ed073-73fd-2f7e-f7d3-0f61aea6f7f7" [ 648.439377] env[61974]: _type = "Task" [ 648.439377] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.448643] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522ed073-73fd-2f7e-f7d3-0f61aea6f7f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.712762] env[61974]: INFO nova.compute.manager [-] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Took 1.05 seconds to deallocate network for instance. 
[ 648.715436] env[61974]: DEBUG nova.compute.claims [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 648.715526] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.735840] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.956679] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Preparing fetch location {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 648.956953] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating directory with path [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.958032] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c007f685-dfc5-43f6-ac73-0fde6abe6665 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.988954] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Created directory with path [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.988954] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Fetch image to [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 648.988954] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Downloading image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to [datastore1] 
vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk on the data store datastore1 {{(pid=61974) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 648.989975] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e863a2a-9975-476e-b3e0-ac2b4772f60f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.998411] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c7701d-8a4d-46de-8662-fb6f6332bfd2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.012472] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fcb300-a245-4cf3-bfd7-745e7025402c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.053498] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5413d5da-cbd9-461b-8fb4-be349816b29b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.060894] env[61974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3622bdc9-8865-4898-919b-8ee970a1501d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.080851] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Downloading image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to the data store datastore1 {{(pid=61974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 649.167546] env[61974]: DEBUG oslo_vmware.rw_handles [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 649.338078] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a2c0a8-6dbc-45b1-9727-6987d848a759 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.352054] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ef857c-3636-41a4-9a99-aed923ca81a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.389994] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e26a5-0de8-4ed8-9ea1-c9621639d6b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.400255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cf1409-5b25-4ec5-bc0b-9d7890d9332e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.418020] env[61974]: DEBUG nova.compute.provider_tree [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.655387] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Successfully created port: 7167262b-7aaa-416e-9df6-32a4d288a9a7 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.750099] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.788904] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.789080] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.789252] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.789433] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.789577] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.789723] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.790077] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.790167] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 649.790289] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.790470] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.790568] env[61974]: DEBUG nova.virt.hardware [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.791467] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793da102-6fb4-4b12-8cd7-9db968f58ec5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.803036] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b8d1c1-1c77-4ffc-ad91-739f8da2aec9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.922710] env[61974]: DEBUG nova.scheduler.client.report [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 650.014286] env[61974]: DEBUG oslo_vmware.rw_handles [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Completed reading data from the image iterator. {{(pid=61974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 650.014286] env[61974]: DEBUG oslo_vmware.rw_handles [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 650.070728] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Downloaded image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk on the data store datastore1 {{(pid=61974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 650.073289] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Caching image {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 650.073289] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Copying Virtual Disk [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk to [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.077015] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-670b508b-f8d8-4d05-860f-3698a5d8ae67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.083920] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 650.083920] env[61974]: value = "task-1378886" [ 650.083920] env[61974]: _type = "Task" [ 650.083920] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.094191] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378886, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.431154] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.431154] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 650.434916] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.362s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.438013] env[61974]: INFO nova.compute.claims [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.596613] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378886, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.936381] env[61974]: DEBUG nova.compute.utils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.938199] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 650.938446] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.095936] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378886, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.757773} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.096298] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Copied Virtual Disk [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk to [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.096540] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleting the datastore file [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.096828] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e9ca004-6e11-44ec-922a-0142e0169cc5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.103937] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 651.103937] env[61974]: value = "task-1378887" [ 651.103937] env[61974]: _type = "Task" [ 651.103937] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.112504] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.241514] env[61974]: DEBUG nova.policy [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cb1c25145d348c08f6f55996fd0fa57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff787609dc6245eda64102cb111123ff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 651.444312] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 651.613082] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023299} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.615560] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.615775] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Moving file from [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8. {{(pid=61974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 651.617284] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-58736cfb-1dea-48bb-a072-25a995496a80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.626759] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 651.626759] env[61974]: value = "task-1378888" [ 651.626759] env[61974]: _type = "Task" [ 651.626759] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.641090] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378888, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.861691] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56f8583-2ce1-46ec-b98d-82f8d9454d16 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.871325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb8ce1a-b19b-46e2-bc84-f0cf397d425a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.908485] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646f1bdb-f98d-4158-b88c-719739f9bf6a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.916614] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91124136-fbc5-4cd6-9f9e-1fe9e0c7b8f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.931511] env[61974]: DEBUG nova.compute.provider_tree [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.001062] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquiring lock "d334c2d8-15d8-4f70-9a85-312687d1b337" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.001399] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "d334c2d8-15d8-4f70-9a85-312687d1b337" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.072168] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Successfully created port: 81241c22-96f1-4162-9b50-5dcc59b8615c {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.141578] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378888, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028044} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.141909] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] File moved {{(pid=61974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 652.142160] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Cleaning up location [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 652.142367] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleting the datastore file [datastore1] vmware_temp/ae44e26d-4f9c-4cef-a6e5-1df275782cf7 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.142650] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cba8a01b-80c6-4d37-a406-c84694d1e045 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.149105] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 652.149105] env[61974]: value = "task-1378889" [ 652.149105] env[61974]: _type = "Task" [ 652.149105] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.159839] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.394539] env[61974]: ERROR nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. 
[ 652.394539] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.394539] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.394539] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.394539] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.394539] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.394539] env[61974]: ERROR nova.compute.manager raise self.value [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.394539] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 652.394539] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.394539] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 652.395130] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.395130] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 652.395130] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. 
[ 652.395130] env[61974]: ERROR nova.compute.manager [ 652.395130] env[61974]: Traceback (most recent call last): [ 652.395130] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 652.395130] env[61974]: listener.cb(fileno) [ 652.395130] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.395130] env[61974]: result = function(*args, **kwargs) [ 652.395130] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.395130] env[61974]: return func(*args, **kwargs) [ 652.395130] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.395130] env[61974]: raise e [ 652.395130] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.395130] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 652.395130] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.395130] env[61974]: created_port_ids = self._update_ports_for_instance( [ 652.395130] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.395130] env[61974]: with excutils.save_and_reraise_exception(): [ 652.395130] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.395130] env[61974]: self.force_reraise() [ 652.395130] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.395130] env[61974]: raise self.value [ 652.395130] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.395130] env[61974]: updated_port = self._update_port( [ 652.395130] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.395130] env[61974]: _ensure_no_port_binding_failure(port) [ 652.395130] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.395130] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 652.396220] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. [ 652.396220] env[61974]: Removing descriptor: 20 [ 652.396220] env[61974]: ERROR nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. 
[ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Traceback (most recent call last): [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] yield resources [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.driver.spawn(context, instance, image_meta, [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.396220] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] vm_ref = self.build_virtual_machine(instance, [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] for vif in network_info: [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self._sync_wrapper(fn, *args, **kwargs) [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.wait() [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self[:] = self._gt.wait() [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self._exit_event.wait() [ 652.396595] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.396956] env[61974]: ERROR 
nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] result = hub.switch() [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self.greenlet.switch() [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] result = function(*args, **kwargs) [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return func(*args, **kwargs) [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise e [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] nwinfo = self.network_api.allocate_for_instance( [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.396956] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] created_port_ids = self._update_ports_for_instance( [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] with excutils.save_and_reraise_exception(): [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.force_reraise() [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise self.value [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] updated_port = self._update_port( [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.397339] 
env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] _ensure_no_port_binding_failure(port) [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 652.397339] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise exception.PortBindingFailed(port_id=port['id']) [ 652.397664] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. [ 652.397664] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] [ 652.397664] env[61974]: INFO nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Terminating instance [ 652.399428] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquiring lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.399428] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquired lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.399428] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.435210] env[61974]: DEBUG nova.scheduler.client.report [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 652.460937] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 652.503208] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.503500] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.503663] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.503871] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.505449] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.505748] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.506051] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.506277] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.506447] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b 
tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.508180] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.508180] env[61974]: DEBUG nova.virt.hardware [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.508180] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e60602-bdde-40c6-b1eb-c96475e8be47 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.524656] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89478a20-1f89-4e37-9c12-4caa8940bbcc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.667377] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024136} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.667673] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.668706] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90549117-aad8-48d1-b080-b395239ed80b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.675486] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 652.675486] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a0dfc8-4bd6-2051-519a-c004704373c0" [ 652.675486] env[61974]: _type = "Task" [ 652.675486] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.686891] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a0dfc8-4bd6-2051-519a-c004704373c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.792825] env[61974]: DEBUG nova.compute.manager [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Received event network-changed-7167262b-7aaa-416e-9df6-32a4d288a9a7 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.793048] env[61974]: DEBUG nova.compute.manager [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Refreshing instance network info cache due to event network-changed-7167262b-7aaa-416e-9df6-32a4d288a9a7. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 652.793315] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] Acquiring lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.940434] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.951716] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.951716] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.957690] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.312s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.961360] env[61974]: INFO nova.compute.claims [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.143636] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquiring lock "a1c488d6-4eb4-4362-84cd-68151a47d3bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.144462] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "a1c488d6-4eb4-4362-84cd-68151a47d3bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.189036] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a0dfc8-4bd6-2051-519a-c004704373c0, 'name': SearchDatastore_Task, 'duration_secs': 0.008793} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.189036] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.189492] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 41fccade-6e5f-4642-8889-2ce00dbff1c7/41fccade-6e5f-4642-8889-2ce00dbff1c7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 653.189946] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-094dbf9d-2aa1-497c-9c79-5ba74853736c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.197490] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 653.197490] env[61974]: value = "task-1378890" [ 653.197490] env[61974]: _type = "Task" [ 653.197490] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.199257] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.212956] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.462722] env[61974]: DEBUG nova.compute.utils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 653.462994] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 653.465390] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.620644] env[61974]: DEBUG nova.policy [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43a80d1cea644af6ba3aa72cb78aae8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27923c4893664216bb7529d6b9879154', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 653.706356] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Releasing lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.707633] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 653.707969] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.709750] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] Acquired lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.710056] env[61974]: DEBUG nova.network.neutron [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Refreshing network info cache for port 7167262b-7aaa-416e-9df6-32a4d288a9a7 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.714228] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e281cf71-c673-4b19-87df-a976b9008b8f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.725175] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513561} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.727280] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 41fccade-6e5f-4642-8889-2ce00dbff1c7/41fccade-6e5f-4642-8889-2ce00dbff1c7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.727280] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.728631] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46b28530-715d-4872-bd90-413637c6fb09 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.736175] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e923abf-fc3e-4e95-bee8-306ac6ca9ee2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.763452] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance does not exist on 
backend: nova.exception.InstanceNotFound: Instance ef64bb0a-d462-4218-9ddf-7c019727f2ba could not be found. [ 653.763692] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 653.764869] env[61974]: INFO nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Took 0.06 seconds to destroy the instance on the hypervisor. [ 653.764869] env[61974]: DEBUG oslo.service.loopingcall [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.766300] env[61974]: DEBUG nova.compute.manager [-] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 653.766411] env[61974]: DEBUG nova.network.neutron [-] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.768861] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 653.768861] env[61974]: value = "task-1378891" [ 653.768861] env[61974]: _type = "Task" [ 653.768861] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.777840] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378891, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.802584] env[61974]: DEBUG nova.network.neutron [-] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.969112] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 654.260962] env[61974]: DEBUG nova.network.neutron [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.283692] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078035} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.284096] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 654.285047] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970fe107-9104-4197-b71c-2a1b6e7b94ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.313855] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 41fccade-6e5f-4642-8889-2ce00dbff1c7/41fccade-6e5f-4642-8889-2ce00dbff1c7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.317232] env[61974]: DEBUG nova.network.neutron [-] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.318348] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e962fd0f-f69a-4821-8721-03e2115dfd3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.335948] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Successfully created port: 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.339917] env[61974]: ERROR nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. 
[ 654.339917] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.339917] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.339917] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.339917] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.339917] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.339917] env[61974]: ERROR nova.compute.manager raise self.value [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.339917] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 654.339917] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.339917] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 654.340613] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.340613] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 654.340613] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. 
[ 654.340613] env[61974]: ERROR nova.compute.manager [ 654.340613] env[61974]: Traceback (most recent call last): [ 654.340613] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 654.340613] env[61974]: listener.cb(fileno) [ 654.340613] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.340613] env[61974]: result = function(*args, **kwargs) [ 654.340613] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.340613] env[61974]: return func(*args, **kwargs) [ 654.340613] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.340613] env[61974]: raise e [ 654.340613] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.340613] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 654.340613] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.340613] env[61974]: created_port_ids = self._update_ports_for_instance( [ 654.340613] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.340613] env[61974]: with excutils.save_and_reraise_exception(): [ 654.340613] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.340613] env[61974]: self.force_reraise() [ 654.340613] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.340613] env[61974]: raise self.value [ 654.340613] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.340613] env[61974]: updated_port = self._update_port( [ 654.340613] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.340613] env[61974]: _ensure_no_port_binding_failure(port) [ 654.340613] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.340613] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 654.341509] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. [ 654.341509] env[61974]: Removing descriptor: 18 [ 654.341509] env[61974]: INFO nova.compute.manager [-] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Took 0.57 seconds to deallocate network for instance. [ 654.341509] env[61974]: ERROR nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. 
[ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] Traceback (most recent call last): [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] yield resources [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.driver.spawn(context, instance, image_meta, [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.341509] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] vm_ref = self.build_virtual_machine(instance, [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] for vif in network_info: [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self._sync_wrapper(fn, *args, **kwargs) [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.wait() [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self[:] = self._gt.wait() [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.342011] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self._exit_event.wait() [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.342400] env[61974]: ERROR 
nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] result = hub.switch() [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self.greenlet.switch() [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] result = function(*args, **kwargs) [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return func(*args, **kwargs) [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise e [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] nwinfo = self.network_api.allocate_for_instance( [ 654.342400] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] created_port_ids = self._update_ports_for_instance( [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] with excutils.save_and_reraise_exception(): [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.force_reraise() [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise self.value [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] updated_port = self._update_port( [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.342859] 
env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] _ensure_no_port_binding_failure(port) [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.342859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise exception.PortBindingFailed(port_id=port['id']) [ 654.343859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. [ 654.343859] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] [ 654.343859] env[61974]: INFO nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Terminating instance [ 654.346506] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquiring lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.346506] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquired lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.346506] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.355747] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 654.355747] env[61974]: value = "task-1378892" [ 654.355747] env[61974]: _type = "Task" [ 654.355747] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.369857] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378892, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.376649] env[61974]: DEBUG nova.compute.claims [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 654.376834] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.412377] env[61974]: DEBUG nova.network.neutron [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.592844] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fe9a5e-4976-4a7a-8f9d-a42b3b862b85 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.604973] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7922baf5-2e35-4373-b0c6-35ffae94080a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.636522] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ec204a-2c5f-4128-a758-46dcc17f5799 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.646291] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac65e422-d512-4bc4-9fa5-450d584988e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.657861] env[61974]: DEBUG nova.compute.provider_tree [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.873798] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378892, 'name': ReconfigVM_Task, 'duration_secs': 0.362747} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.878221] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 41fccade-6e5f-4642-8889-2ce00dbff1c7/41fccade-6e5f-4642-8889-2ce00dbff1c7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.879066] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13501d75-7933-4e78-9dfa-ed715194d879 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.881627] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.889272] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 654.889272] env[61974]: value = "task-1378893" [ 654.889272] env[61974]: _type = "Task" [ 654.889272] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.898605] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378893, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.914407] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed82cf4c-c643-4715-9328-3e7202ac6818 req-2660dc81-2b17-4083-8f16-e2d9dfcdaece service nova] Releasing lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.980735] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.984794] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 655.037542] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.037542] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.037542] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.037782] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.037782] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.037782] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.037782] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.037998] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 655.039953] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.041171] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.041171] env[61974]: DEBUG nova.virt.hardware [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.041844] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a6ec4c-3425-4d72-b69f-62f8b4fef5bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.052524] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22790afd-1e48-465b-88ce-cb591b38f328 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.086823] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.087022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.160615] env[61974]: DEBUG nova.scheduler.client.report [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 655.371123] env[61974]: DEBUG nova.compute.manager [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Received event 
network-vif-deleted-7167262b-7aaa-416e-9df6-32a4d288a9a7 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 655.371590] env[61974]: DEBUG nova.compute.manager [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Received event network-changed-81241c22-96f1-4162-9b50-5dcc59b8615c {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 655.371708] env[61974]: DEBUG nova.compute.manager [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Refreshing instance network info cache due to event network-changed-81241c22-96f1-4162-9b50-5dcc59b8615c. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 655.371942] env[61974]: DEBUG oslo_concurrency.lockutils [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] Acquiring lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.399392] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378893, 'name': Rename_Task, 'duration_secs': 0.14332} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.400024] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.400097] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97c2454c-3765-4977-aec3-d863abb16dc9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.407571] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 655.407571] env[61974]: value = "task-1378894" [ 655.407571] env[61974]: _type = "Task" [ 655.407571] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.415131] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378894, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.493498] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Releasing lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.494188] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 655.494742] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.495528] env[61974]: DEBUG oslo_concurrency.lockutils [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] Acquired lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.495788] env[61974]: DEBUG nova.network.neutron [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Refreshing network info cache for port 81241c22-96f1-4162-9b50-5dcc59b8615c {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 655.498165] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff354c22-e7fc-4e1e-b4fd-54bbbdc7d542 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.508334] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4618166-082e-4993-a99f-43437768aba1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.535925] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 84448f61-d302-428f-b995-e942e27c39fd could not be found. [ 655.536275] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 655.536887] env[61974]: INFO nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Took 0.04 seconds to destroy the instance on the hypervisor. 
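[Editor's note] The Rename_Task and PowerOnVM_Task entries above show the driver's wait-for-task pattern: oslo.vmware invokes the vCenter task, then _poll_task periodically logs the task's progress ("progress is 0%.") until the task reports completion. The snippet below is only a minimal, self-contained sketch of that polling idea, not oslo.vmware's actual implementation; fetch_task_info is a hypothetical callable standing in for the vCenter property read.

    # Illustrative sketch of a wait-for-task loop in the spirit of the
    # _poll_task entries above. fetch_task_info(task_id) is a hypothetical
    # helper returning e.g. {'name': 'PowerOnVM_Task', 'state': 'running',
    # 'progress': 89}; it is not part of oslo.vmware's public API.
    import logging
    import time

    LOG = logging.getLogger(__name__)

    def wait_for_task(fetch_task_info, task_id, interval=0.5):
        """Poll a vCenter-style task until it succeeds or errors out."""
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                duration = time.monotonic() - start
                LOG.debug("Task %s (%s) completed successfully in %.3fs",
                          task_id, info['name'], duration)
                return info
            if info['state'] == 'error':
                raise RuntimeError("Task %s failed: %s"
                                   % (task_id, info.get('error')))
            LOG.debug("Task %s (%s) progress is %s%%.",
                      task_id, info['name'], info.get('progress', 0))
            time.sleep(interval)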
[ 655.536887] env[61974]: DEBUG oslo.service.loopingcall [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 655.537184] env[61974]: DEBUG nova.compute.manager [-] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 655.537391] env[61974]: DEBUG nova.network.neutron [-] [instance: 84448f61-d302-428f-b995-e942e27c39fd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 655.557160] env[61974]: DEBUG nova.network.neutron [-] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.667342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.667937] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 655.672804] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.776s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.796894] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "30455d07-4826-4561-a04f-1b4a2041402c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.797123] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "30455d07-4826-4561-a04f-1b4a2041402c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.900503] env[61974]: ERROR nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. 
[ 655.900503] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.900503] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.900503] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.900503] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.900503] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.900503] env[61974]: ERROR nova.compute.manager raise self.value [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.900503] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 655.900503] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.900503] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 655.901098] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.901098] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 655.901098] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. 
[ 655.901098] env[61974]: ERROR nova.compute.manager [ 655.901098] env[61974]: Traceback (most recent call last): [ 655.901098] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 655.901098] env[61974]: listener.cb(fileno) [ 655.901098] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.901098] env[61974]: result = function(*args, **kwargs) [ 655.901098] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.901098] env[61974]: return func(*args, **kwargs) [ 655.901098] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.901098] env[61974]: raise e [ 655.901098] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.901098] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 655.901098] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.901098] env[61974]: created_port_ids = self._update_ports_for_instance( [ 655.901098] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.901098] env[61974]: with excutils.save_and_reraise_exception(): [ 655.901098] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.901098] env[61974]: self.force_reraise() [ 655.901098] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.901098] env[61974]: raise self.value [ 655.901098] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.901098] env[61974]: updated_port = self._update_port( [ 655.901098] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.901098] env[61974]: _ensure_no_port_binding_failure(port) [ 655.901098] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.901098] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 655.902127] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. [ 655.902127] env[61974]: Removing descriptor: 20 [ 655.902127] env[61974]: ERROR nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. 
[ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Traceback (most recent call last): [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] yield resources [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.driver.spawn(context, instance, image_meta, [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.902127] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] vm_ref = self.build_virtual_machine(instance, [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] for vif in network_info: [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self._sync_wrapper(fn, *args, **kwargs) [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.wait() [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self[:] = self._gt.wait() [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self._exit_event.wait() [ 655.902552] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 655.902973] env[61974]: ERROR 
nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] result = hub.switch() [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self.greenlet.switch() [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] result = function(*args, **kwargs) [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return func(*args, **kwargs) [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise e [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] nwinfo = self.network_api.allocate_for_instance( [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.902973] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] created_port_ids = self._update_ports_for_instance( [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] with excutils.save_and_reraise_exception(): [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.force_reraise() [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise self.value [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] updated_port = self._update_port( [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.903412] 
env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] _ensure_no_port_binding_failure(port) [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.903412] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise exception.PortBindingFailed(port_id=port['id']) [ 655.903851] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. [ 655.903851] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] [ 655.903851] env[61974]: INFO nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Terminating instance [ 655.903851] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquiring lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.903999] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquired lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.904949] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.924285] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378894, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.021396] env[61974]: DEBUG nova.network.neutron [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.035182] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "f6b76518-d691-4e4f-861a-624a1684e564" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.035739] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.062164] env[61974]: DEBUG nova.network.neutron [-] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.085086] env[61974]: DEBUG nova.network.neutron [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.178834] env[61974]: DEBUG nova.compute.utils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.183072] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 656.183186] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 656.248491] env[61974]: DEBUG nova.policy [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '468bf17d7d5643fa9b4587b7ce5df7d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe4c34c59393476bb016bd09ed45164a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 656.418818] env[61974]: DEBUG oslo_vmware.api [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378894, 'name': PowerOnVM_Task, 'duration_secs': 0.558797} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.421055] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.421262] env[61974]: INFO nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Took 9.21 seconds to spawn the instance on the hypervisor. [ 656.421511] env[61974]: DEBUG nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 656.422633] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f610ca-5206-4cd7-87ea-4bcf60846f20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.539130] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.565302] env[61974]: INFO nova.compute.manager [-] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Took 1.03 seconds to deallocate network for instance. 
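[Editor's note] Every PortBindingFailed traceback in this section bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises the exception when Neutron reports the port's binding as failed. The following is a simplified, standalone sketch of that guard pattern under the assumption that the Neutron port dict exposes 'binding:vif_type' with 'binding_failed' as the failure marker; the exception class here is a stand-in, not Nova's exact code.

    # Illustrative sketch of the guard the tracebacks above end in.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron marked the port's binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        bad_port = {'id': '137cbe69-c6b7-49c8-9037-a0fc85f4f4c1',
                    'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)  # same message shape as the ERROR records above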
[ 656.570909] env[61974]: DEBUG nova.compute.claims [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 656.571143] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.587104] env[61974]: DEBUG oslo_concurrency.lockutils [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] Releasing lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.587104] env[61974]: DEBUG nova.compute.manager [req-300108ac-0bb2-4306-a60a-c09995f56ca2 req-d563f292-966a-4608-8fc3-cecbd766b466 service nova] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Received event network-vif-deleted-81241c22-96f1-4162-9b50-5dcc59b8615c {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 656.623265] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657ee293-d8c9-4ea5-a814-0322a6389c3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.636747] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe0048f-12d8-40a0-9219-f02a8d0839a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.673376] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17ea321-eaa6-4503-bba7-04a39955e242 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.680795] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a674889-5346-4c06-9de4-1bcf53fb9dfb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.684961] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 656.697859] env[61974]: DEBUG nova.compute.provider_tree [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.702936] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.805613] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Successfully created port: 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.945589] env[61974]: INFO nova.compute.manager [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Took 29.07 seconds to build instance. [ 657.205985] env[61974]: DEBUG nova.scheduler.client.report [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.207541] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Releasing lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.208018] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 657.208254] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.208695] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ed29733-779e-4e66-9228-cf71fdb3f2a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.219229] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5b896f-32ad-484a-a886-04f77e179588 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.243722] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 32b1f31b-1e2e-4f53-8e97-265f79a74899 could not be found. [ 657.243955] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 657.244220] env[61974]: INFO nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Took 0.04 seconds to destroy the instance on the hypervisor. [ 657.244420] env[61974]: DEBUG oslo.service.loopingcall [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 657.244643] env[61974]: DEBUG nova.compute.manager [-] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 657.244740] env[61974]: DEBUG nova.network.neutron [-] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 657.263940] env[61974]: DEBUG nova.network.neutron [-] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.449563] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b732987e-9e45-4634-8da3-382fa195f036 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.230s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.479959] env[61974]: DEBUG nova.compute.manager [None req-d53e507a-8e4e-4085-987c-3d09337b7d94 tempest-ServerDiagnosticsV248Test-2108047383 tempest-ServerDiagnosticsV248Test-2108047383-project-admin] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 657.481234] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26222bb7-0439-43e4-9f89-2bf3c2c1fc3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.488881] env[61974]: INFO nova.compute.manager [None req-d53e507a-8e4e-4085-987c-3d09337b7d94 tempest-ServerDiagnosticsV248Test-2108047383 tempest-ServerDiagnosticsV248Test-2108047383-project-admin] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Retrieving diagnostics [ 657.490554] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee5ac2d-665d-418c-a7c6-d2b7a3b4ae1a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.643157] env[61974]: DEBUG nova.compute.manager [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Received event network-changed-137cbe69-c6b7-49c8-9037-a0fc85f4f4c1 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 657.643347] env[61974]: DEBUG nova.compute.manager [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Refreshing instance network info cache due to event network-changed-137cbe69-c6b7-49c8-9037-a0fc85f4f4c1. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 657.643563] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] Acquiring lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.643704] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] Acquired lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.643860] env[61974]: DEBUG nova.network.neutron [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Refreshing network info cache for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.695756] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 657.712019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.037s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.712019] env[61974]: ERROR nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. 
[ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] Traceback (most recent call last): [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.driver.spawn(context, instance, image_meta, [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.712019] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] vm_ref = self.build_virtual_machine(instance, [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] for vif in network_info: [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self._sync_wrapper(fn, *args, **kwargs) [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.wait() [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self[:] = self._gt.wait() [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self._exit_event.wait() [ 657.712786] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] result = hub.switch() [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return self.greenlet.switch() [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] result = function(*args, **kwargs) [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] return func(*args, **kwargs) [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise e [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] nwinfo = self.network_api.allocate_for_instance( [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.713170] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] created_port_ids = self._update_ports_for_instance( [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] with excutils.save_and_reraise_exception(): [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] self.force_reraise() [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise self.value [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] updated_port = self._update_port( [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] _ensure_no_port_binding_failure(port) [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 657.713542] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] raise exception.PortBindingFailed(port_id=port['id']) [ 657.713874] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] nova.exception.PortBindingFailed: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. [ 657.713874] env[61974]: ERROR nova.compute.manager [instance: 287496bf-b981-41d5-81fc-791d793c244e] [ 657.713874] env[61974]: DEBUG nova.compute.utils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 657.714963] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.467s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.718135] env[61974]: INFO nova.compute.claims [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.728082] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Build of instance 287496bf-b981-41d5-81fc-791d793c244e was re-scheduled: Binding failed for port 270ec13f-ec53-4668-ad5d-a03e46ac6083, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 657.728082] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 657.728082] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquiring lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.728082] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Acquired lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.728306] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.736642] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.736807] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.736925] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.737141] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Flavor pref 0:0:0 {{(pid=61974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.737291] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.737436] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.737638] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.737795] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.737958] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.738332] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.738594] env[61974]: DEBUG nova.virt.hardware [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.739703] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f893409b-03e0-476a-a320-fd89141caad1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.747678] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964948ed-59de-40f4-96c9-5fc2f40a2d6b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.767064] env[61974]: DEBUG nova.network.neutron [-] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.953169] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 
tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 657.978350] env[61974]: ERROR nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. [ 657.978350] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.978350] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.978350] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.978350] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.978350] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.978350] env[61974]: ERROR nova.compute.manager raise self.value [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.978350] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 657.978350] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.978350] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 657.979042] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.979042] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 657.979042] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. 
[ 657.979042] env[61974]: ERROR nova.compute.manager [ 657.979042] env[61974]: Traceback (most recent call last): [ 657.979042] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 657.979042] env[61974]: listener.cb(fileno) [ 657.979042] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.979042] env[61974]: result = function(*args, **kwargs) [ 657.979042] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.979042] env[61974]: return func(*args, **kwargs) [ 657.979042] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.979042] env[61974]: raise e [ 657.979042] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.979042] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 657.979042] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.979042] env[61974]: created_port_ids = self._update_ports_for_instance( [ 657.979042] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.979042] env[61974]: with excutils.save_and_reraise_exception(): [ 657.979042] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.979042] env[61974]: self.force_reraise() [ 657.979042] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.979042] env[61974]: raise self.value [ 657.979042] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.979042] env[61974]: updated_port = self._update_port( [ 657.979042] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.979042] env[61974]: _ensure_no_port_binding_failure(port) [ 657.979042] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.979042] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 657.980247] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. [ 657.980247] env[61974]: Removing descriptor: 20 [ 657.980247] env[61974]: ERROR nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. 
[ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Traceback (most recent call last): [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] yield resources [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.driver.spawn(context, instance, image_meta, [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.980247] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] vm_ref = self.build_virtual_machine(instance, [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] for vif in network_info: [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self._sync_wrapper(fn, *args, **kwargs) [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.wait() [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self[:] = self._gt.wait() [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self._exit_event.wait() [ 657.981074] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 657.981729] env[61974]: ERROR 
nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] result = hub.switch() [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self.greenlet.switch() [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] result = function(*args, **kwargs) [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return func(*args, **kwargs) [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise e [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] nwinfo = self.network_api.allocate_for_instance( [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.981729] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] created_port_ids = self._update_ports_for_instance( [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] with excutils.save_and_reraise_exception(): [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.force_reraise() [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise self.value [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] updated_port = self._update_port( [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.982476] 
env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] _ensure_no_port_binding_failure(port) [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.982476] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise exception.PortBindingFailed(port_id=port['id']) [ 657.982983] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. [ 657.982983] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] [ 657.982983] env[61974]: INFO nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Terminating instance [ 657.982983] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.982983] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquired lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.982983] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.170017] env[61974]: DEBUG nova.network.neutron [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.258232] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.266463] env[61974]: DEBUG nova.network.neutron [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.269251] env[61974]: INFO nova.compute.manager [-] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Took 1.02 seconds to deallocate network for instance. 
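The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure. Below is a minimal, illustrative sketch of the kind of check that raises this exception; the 'binding:vif_type' == 'binding_failed' signal is an assumption of this sketch, not something read from this log, and only the port id is taken from the traceback above.

```python
class PortBindingFailed(Exception):
    """Raised when Neutron reports that a port could not be bound."""


def ensure_no_port_binding_failure(port):
    # Assumption for this sketch: a failed binding is signalled by the
    # port's 'binding:vif_type' being 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port['id'])


# Port id taken from the traceback above; everything else is illustrative.
failed_port = {'id': '2c7e35d9-5de4-4553-8d4e-bc3608ff33fc',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)
```

Once the exception escapes the network allocation, the compute manager terminates the instance and deallocates its network, which is what the surrounding DEBUG/INFO lines show.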
[ 658.271194] env[61974]: DEBUG nova.compute.claims [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 658.271369] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.352382] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.476275] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.503324] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.562416] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.769314] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] Releasing lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.769725] env[61974]: DEBUG nova.compute.manager [req-6c7ff641-f306-44c6-a957-d1504662fcf0 req-9ccf3a3b-3daa-4975-8a4f-9101cf49c6f8 service nova] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Received event network-vif-deleted-137cbe69-c6b7-49c8-9037-a0fc85f4f4c1 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 658.855246] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Releasing lock "refresh_cache-287496bf-b981-41d5-81fc-791d793c244e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.855480] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 658.855657] env[61974]: DEBUG nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.855819] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 658.872063] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.066389] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Releasing lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.067051] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 659.067051] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.068980] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43cfb28c-7bd7-43d2-ab17-bdebe121ec57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.077852] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49192b5-5c6d-4162-bc03-15f5d66efc2d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.091679] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.092136] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.102643] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e494bace-3f29-45a8-90b4-afa5753c7936 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.111053] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 270447a7-ebbf-4671-bc6c-522f23d21788 could not be found. 
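The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around refresh_cache-&lt;uuid&gt; come from oslo.concurrency's named locks serializing access to an instance's cached network info. A minimal sketch of that pattern, assuming a process-local lock; the instance UUID is taken from the log, the critical section is hypothetical.

```python
from oslo_concurrency import lockutils

instance_uuid = "270447a7-ebbf-4671-bc6c-522f23d21788"

# Same pattern as the DEBUG lock lines above: a named lock guards the
# per-instance network info cache while it is rebuilt.
with lockutils.lock("refresh_cache-%s" % instance_uuid):
    # Hypothetical critical section: refresh the cached network_info here.
    network_info = []
    print("cache refreshed under lock:", network_info)
```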
[ 659.111308] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 659.111497] env[61974]: INFO nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Took 0.04 seconds to destroy the instance on the hypervisor. [ 659.111741] env[61974]: DEBUG oslo.service.loopingcall [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 659.112366] env[61974]: DEBUG nova.compute.manager [-] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 659.112462] env[61974]: DEBUG nova.network.neutron [-] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 659.117699] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cdd873-1cc0-4056-bcaf-565aa9802875 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.148563] env[61974]: DEBUG nova.network.neutron [-] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.150287] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141baea4-fdcf-4dda-a0b8-adbd21dc4285 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.158391] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64919b47-4c25-4eff-82b9-a3479d7c7432 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.173761] env[61974]: DEBUG nova.compute.provider_tree [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.374964] env[61974]: DEBUG nova.network.neutron [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.599024] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.599024] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 659.599024] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.654250] env[61974]: DEBUG nova.network.neutron [-] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.666064] env[61974]: DEBUG nova.compute.manager [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Received event network-changed-2c7e35d9-5de4-4553-8d4e-bc3608ff33fc {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 659.666220] env[61974]: DEBUG nova.compute.manager [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Refreshing instance network info cache due to event network-changed-2c7e35d9-5de4-4553-8d4e-bc3608ff33fc. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 659.666419] env[61974]: DEBUG oslo_concurrency.lockutils [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] Acquiring lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.666569] env[61974]: DEBUG oslo_concurrency.lockutils [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] Acquired lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.666726] env[61974]: DEBUG nova.network.neutron [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Refreshing network info cache for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.676401] env[61974]: DEBUG nova.scheduler.client.report [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.878702] env[61974]: INFO nova.compute.manager [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] [instance: 287496bf-b981-41d5-81fc-791d793c244e] Took 1.02 seconds to deallocate network for instance. [ 660.102446] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.102618] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.102756] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.102879] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Skipping network cache update for instance because it is Building. 
{{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.103049] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.103196] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.103315] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 660.135935] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.136126] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquired lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.136297] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Forcefully refreshing network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 660.136485] env[61974]: DEBUG nova.objects.instance [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lazy-loading 'info_cache' on Instance uuid 41fccade-6e5f-4642-8889-2ce00dbff1c7 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 660.157134] env[61974]: INFO nova.compute.manager [-] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Took 1.04 seconds to deallocate network for instance. 
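The inventory data logged above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much the scheduler can allocate per resource class: usable capacity is (total - reserved) * allocation_ratio. A small worked example using only the figures from this log:

```python
# Figures copied from the inventory data logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    # Allocatable capacity per resource class.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print("%s: %.0f allocatable" % (resource_class, capacity))

# Expected output with these figures:
#   VCPU: 192 allocatable
#   MEMORY_MB: 196078 allocatable
#   DISK_GB: 400 allocatable
```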
[ 660.160045] env[61974]: DEBUG nova.compute.claims [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 660.160403] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.182908] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.182908] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 660.185728] env[61974]: DEBUG nova.network.neutron [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.187514] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.371s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.256531] env[61974]: DEBUG nova.network.neutron [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.693740] env[61974]: DEBUG nova.compute.utils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.698809] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 660.698985] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.760258] env[61974]: DEBUG oslo_concurrency.lockutils [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] Releasing lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.760506] env[61974]: DEBUG nova.compute.manager [req-e9c89e3d-eef7-4337-82b6-9c49612daada req-1e24ed8f-5045-4af1-8f1b-cfe6ae3d73e1 service nova] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Received event network-vif-deleted-2c7e35d9-5de4-4553-8d4e-bc3608ff33fc {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 660.761906] env[61974]: DEBUG nova.policy [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '363828c32a204f83b05e8492eb177098', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7779372a20e04d10b28d4ff9b784b689', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 660.912822] env[61974]: INFO nova.scheduler.client.report [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Deleted allocations for instance 287496bf-b981-41d5-81fc-791d793c244e [ 661.070086] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Successfully created port: 4213fc5e-5fe2-4688-a8c5-65efea292464 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.090695] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30858c6-4e1b-4324-9cd0-8fb5c2f145ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.102099] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05390f3-8d97-4fbf-8d6d-a0e31f05f85c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.134275] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d665ed-905d-40b4-b5b6-860c410d4f7b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.142143] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3061bfdf-61c2-4f48-bd3b-fc9f75eb0460 {{(pid=61974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.157118] env[61974]: DEBUG nova.compute.provider_tree [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.164837] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.199443] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 661.423904] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d499e03-0978-422b-b6f1-312211ba4928 tempest-ServerActionsTestOtherB-866237681 tempest-ServerActionsTestOtherB-866237681-project-member] Lock "287496bf-b981-41d5-81fc-791d793c244e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.994s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.660480] env[61974]: DEBUG nova.scheduler.client.report [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.749581] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.926872] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 661.969320] env[61974]: DEBUG nova.compute.manager [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Received event network-changed-4213fc5e-5fe2-4688-a8c5-65efea292464 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 661.969482] env[61974]: DEBUG nova.compute.manager [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Refreshing instance network info cache due to event network-changed-4213fc5e-5fe2-4688-a8c5-65efea292464. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 661.969696] env[61974]: DEBUG oslo_concurrency.lockutils [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] Acquiring lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.969828] env[61974]: DEBUG oslo_concurrency.lockutils [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] Acquired lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.969983] env[61974]: DEBUG nova.network.neutron [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Refreshing network info cache for port 4213fc5e-5fe2-4688-a8c5-65efea292464 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.165528] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.978s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.166106] env[61974]: ERROR nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. 
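The req-.../req-... "service nova" contexts above, which receive network-changed and network-vif-deleted events, correspond to Neutron notifying Nova through the os-server-external-events API. The following is a hedged sketch of such a notification: the endpoint URL and token are placeholders, and the exact payload shape is an assumption; only the event name, server UUID, and port tag mirror the log lines above.

```python
import requests

# Hypothetical endpoint and token; replace with real values.
NOVA_API = "http://controller:8774/v2.1"
TOKEN = "REPLACE_WITH_TOKEN"

payload = {
    "events": [{
        "name": "network-changed",
        "server_uuid": "1c7edeed-2fa7-4662-9994-21708dcb3efd",
        # Tag identifies the affected port, as in the event above.
        "tag": "4213fc5e-5fe2-4688-a8c5-65efea292464",
    }]
}

resp = requests.post(NOVA_API + "/os-server-external-events",
                     json=payload,
                     headers={"X-Auth-Token": TOKEN})
print(resp.status_code, resp.text)
```

On receipt, the compute manager refreshes the instance's network info cache under the refresh_cache lock, which is exactly the sequence the surrounding DEBUG lines record.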
[ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Traceback (most recent call last): [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.driver.spawn(context, instance, image_meta, [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] vm_ref = self.build_virtual_machine(instance, [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.166106] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] for vif in network_info: [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self._sync_wrapper(fn, *args, **kwargs) [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.wait() [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self[:] = self._gt.wait() [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self._exit_event.wait() [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] result = hub.switch() [ 662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
662.166576] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return self.greenlet.switch() [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] result = function(*args, **kwargs) [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] return func(*args, **kwargs) [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise e [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] nwinfo = self.network_api.allocate_for_instance( [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] created_port_ids = self._update_ports_for_instance( [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] with excutils.save_and_reraise_exception(): [ 662.167089] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] self.force_reraise() [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise self.value [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] updated_port = self._update_port( [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] _ensure_no_port_binding_failure(port) [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] raise exception.PortBindingFailed(port_id=port['id']) [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] nova.exception.PortBindingFailed: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. [ 662.167612] env[61974]: ERROR nova.compute.manager [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] [ 662.167995] env[61974]: DEBUG nova.compute.utils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 662.169635] env[61974]: ERROR nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. [ 662.169635] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.169635] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.169635] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.169635] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.169635] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.169635] env[61974]: ERROR nova.compute.manager raise self.value [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.169635] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 662.169635] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.169635] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 662.170222] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.170222] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 662.170222] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. 
[ 662.170222] env[61974]: ERROR nova.compute.manager [ 662.170222] env[61974]: Traceback (most recent call last): [ 662.170222] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 662.170222] env[61974]: listener.cb(fileno) [ 662.170222] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.170222] env[61974]: result = function(*args, **kwargs) [ 662.170222] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.170222] env[61974]: return func(*args, **kwargs) [ 662.170222] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.170222] env[61974]: raise e [ 662.170222] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.170222] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 662.170222] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.170222] env[61974]: created_port_ids = self._update_ports_for_instance( [ 662.170222] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.170222] env[61974]: with excutils.save_and_reraise_exception(): [ 662.170222] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.170222] env[61974]: self.force_reraise() [ 662.170222] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.170222] env[61974]: raise self.value [ 662.170222] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.170222] env[61974]: updated_port = self._update_port( [ 662.170222] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.170222] env[61974]: _ensure_no_port_binding_failure(port) [ 662.170222] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.170222] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 662.171296] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. 
[ 662.171296] env[61974]: Removing descriptor: 21 [ 662.171296] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.492s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.171697] env[61974]: INFO nova.compute.claims [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.175187] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Build of instance 62413031-5c7a-498a-9aee-5d9015ef1574 was re-scheduled: Binding failed for port e78a7d88-ebd9-4dcd-b1cc-61d929b8ef15, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 662.175673] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 662.176016] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquiring lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.176224] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Acquired lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.176428] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.209714] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 662.239671] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.239973] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.240192] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.240421] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.240605] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.240789] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.241212] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.241455] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
662.241656] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.241858] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.242082] env[61974]: DEBUG nova.virt.hardware [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.243159] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569c1f12-816e-4293-803d-c0d8fcf7b6e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.254587] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d9f117-3fb3-44fb-8675-fa9300b960aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.258759] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Releasing lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.258944] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Updated the network info_cache for instance {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 662.259151] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.259983] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.260348] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.260595] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.260757] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.260907] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.261075] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 662.261293] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.270886] env[61974]: ERROR nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Traceback (most recent call last): [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] yield resources [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.driver.spawn(context, instance, image_meta, [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] vm_ref = self.build_virtual_machine(instance, [ 662.270886] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] for vif in network_info: [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 
1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return self._sync_wrapper(fn, *args, **kwargs) [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.wait() [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self[:] = self._gt.wait() [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return self._exit_event.wait() [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 662.271367] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] current.throw(*self._exc) [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] result = function(*args, **kwargs) [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return func(*args, **kwargs) [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise e [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] nwinfo = self.network_api.allocate_for_instance( [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] created_port_ids = self._update_ports_for_instance( [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] with excutils.save_and_reraise_exception(): [ 662.271821] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.force_reraise() [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise self.value [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] updated_port = self._update_port( [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] _ensure_no_port_binding_failure(port) [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise exception.PortBindingFailed(port_id=port['id']) [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. [ 662.272279] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] [ 662.272279] env[61974]: INFO nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Terminating instance [ 662.273447] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.452333] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.648217] env[61974]: DEBUG nova.network.neutron [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.716342] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.773546] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.865097] env[61974]: DEBUG nova.network.neutron [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.897779] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.373539] env[61974]: DEBUG oslo_concurrency.lockutils [req-bccc8492-ac20-427c-98e5-7f07332a650f req-98f1c492-be69-47fc-b992-930a42e37beb service nova] Releasing lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.374069] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquired lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.375436] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.403045] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Releasing lock "refresh_cache-62413031-5c7a-498a-9aee-5d9015ef1574" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.403045] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 663.403045] env[61974]: DEBUG nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.403045] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.427488] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.637412] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eee782-6bed-41dc-96a3-f7b07ad2a404 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.645781] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec05618d-a37d-47f6-bf5f-18c034dfef20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.678038] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7a2a52-19e5-41d9-a790-b3d2959174f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.685739] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cba75d-d93a-4d2c-9806-8a3ec74c7507 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.699976] env[61974]: DEBUG nova.compute.provider_tree [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.902132] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.940174] env[61974]: DEBUG nova.network.neutron [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.009444] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.038112] env[61974]: DEBUG nova.compute.manager [req-d0f9966b-5836-4f80-b11e-972f72ee7a00 req-91107ec3-8c68-4e80-b042-db966fbd24f8 service nova] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Received event network-vif-deleted-4213fc5e-5fe2-4688-a8c5-65efea292464 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 664.205753] env[61974]: DEBUG nova.scheduler.client.report [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.439882] env[61974]: INFO nova.compute.manager [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] [instance: 62413031-5c7a-498a-9aee-5d9015ef1574] Took 1.04 seconds to deallocate network for instance. [ 664.512547] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Releasing lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.513036] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 664.513761] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.513761] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ff20353-dfa8-485f-afc5-9c0a859d979f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.527157] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0445be90-e03a-45f4-afa6-8b2b9a72c683 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.550246] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c7edeed-2fa7-4662-9994-21708dcb3efd could not be found. [ 664.550375] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.550603] env[61974]: INFO nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 664.550967] env[61974]: DEBUG oslo.service.loopingcall [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.551435] env[61974]: DEBUG nova.compute.manager [-] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 664.551539] env[61974]: DEBUG nova.network.neutron [-] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.568434] env[61974]: DEBUG nova.network.neutron [-] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.711322] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.711432] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.714009] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.998s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.070841] env[61974]: DEBUG nova.network.neutron [-] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.218883] env[61974]: DEBUG nova.compute.utils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 665.223512] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 665.224444] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.289056] env[61974]: DEBUG nova.policy [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d6b2b442f324db1953ff5602a8865dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88d7eaa5f4574a67a038ce9e91650338', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 665.472112] env[61974]: INFO nova.scheduler.client.report [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Deleted allocations for instance 62413031-5c7a-498a-9aee-5d9015ef1574 [ 665.574485] env[61974]: INFO nova.compute.manager [-] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Took 1.02 seconds to deallocate network for instance. [ 665.584224] env[61974]: DEBUG nova.compute.claims [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 665.584224] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.620741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.621047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.637035] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 
59c238dd-10f0-437c-a794-79bc87f05f2e] Successfully created port: 51939b60-617f-4b25-93a9-b5ccbdda2671 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.642759] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f830ace4-1dc1-4cb7-ab96-631fbabb3541 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.651146] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d67847c-0c11-4eda-ba5c-d2a68fdb4853 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.683762] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65ededb-dc3b-400c-9279-53d44688caca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.693274] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be59ec9-a64c-4255-b0aa-b7d72f621d1b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.708554] env[61974]: DEBUG nova.compute.provider_tree [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.745974] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.983566] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6f38bacc-be21-40db-b599-30bbdf783e8c tempest-AttachInterfacesV270Test-1219200557 tempest-AttachInterfacesV270Test-1219200557-project-member] Lock "62413031-5c7a-498a-9aee-5d9015ef1574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.820s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.214027] env[61974]: DEBUG nova.scheduler.client.report [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 666.486343] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 666.534944] env[61974]: DEBUG nova.compute.manager [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Received event network-changed-51939b60-617f-4b25-93a9-b5ccbdda2671 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 666.534944] env[61974]: DEBUG nova.compute.manager [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Refreshing instance network info cache due to event network-changed-51939b60-617f-4b25-93a9-b5ccbdda2671. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 666.535942] env[61974]: DEBUG oslo_concurrency.lockutils [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] Acquiring lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.535942] env[61974]: DEBUG oslo_concurrency.lockutils [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] Acquired lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.536143] env[61974]: DEBUG nova.network.neutron [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Refreshing network info cache for port 51939b60-617f-4b25-93a9-b5ccbdda2671 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 666.718186] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.718853] env[61974]: ERROR nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. 
[ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Traceback (most recent call last): [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.driver.spawn(context, instance, image_meta, [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] vm_ref = self.build_virtual_machine(instance, [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.718853] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] for vif in network_info: [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self._sync_wrapper(fn, *args, **kwargs) [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.wait() [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self[:] = self._gt.wait() [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self._exit_event.wait() [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] result = hub.switch() [ 666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
666.719098] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return self.greenlet.switch() [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] result = function(*args, **kwargs) [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] return func(*args, **kwargs) [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise e [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] nwinfo = self.network_api.allocate_for_instance( [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] created_port_ids = self._update_ports_for_instance( [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] with excutils.save_and_reraise_exception(): [ 666.719343] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] self.force_reraise() [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise self.value [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] updated_port = self._update_port( [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] _ensure_no_port_binding_failure(port) [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] raise exception.PortBindingFailed(port_id=port['id']) [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] nova.exception.PortBindingFailed: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. [ 666.719622] env[61974]: ERROR nova.compute.manager [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] [ 666.719856] env[61974]: DEBUG nova.compute.utils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.724721] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Build of instance 622aca09-aab9-4e93-b4d3-621d33df7903 was re-scheduled: Binding failed for port 05f49d80-b505-45bb-a0d6-b0a4d913c71e, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 666.725187] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 666.725417] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquiring lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.726407] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Acquired lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.726566] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.729628] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.351s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.738932] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 666.759182] env[61974]: ERROR nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. [ 666.759182] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.759182] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.759182] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.759182] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.759182] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.759182] env[61974]: ERROR nova.compute.manager raise self.value [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.759182] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 666.759182] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.759182] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 666.759550] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.759550] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 666.759550] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. 
[ 666.759550] env[61974]: ERROR nova.compute.manager [ 666.759550] env[61974]: Traceback (most recent call last): [ 666.759550] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 666.759550] env[61974]: listener.cb(fileno) [ 666.759550] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.759550] env[61974]: result = function(*args, **kwargs) [ 666.759550] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.759550] env[61974]: return func(*args, **kwargs) [ 666.759550] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.759550] env[61974]: raise e [ 666.759550] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.759550] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 666.759550] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.759550] env[61974]: created_port_ids = self._update_ports_for_instance( [ 666.759550] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.759550] env[61974]: with excutils.save_and_reraise_exception(): [ 666.759550] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.759550] env[61974]: self.force_reraise() [ 666.759550] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.759550] env[61974]: raise self.value [ 666.759550] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.759550] env[61974]: updated_port = self._update_port( [ 666.759550] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.759550] env[61974]: _ensure_no_port_binding_failure(port) [ 666.759550] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.759550] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 666.760236] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. 
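Both tracebacks above walk the same path: _allocate_network_async -> allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure, with oslo_utils.excutils.save_and_reraise_exception() preserving the original error while cleanup runs. What follows is only a minimal sketch of that pattern, not the Nova source: the port dict shape, the 'binding_failed' sentinel, and the local PortBindingFailed class are assumptions standing in for nova.exception and nova/network/neutron.py.

    # Illustrative sketch (not Nova source) of the failure path seen in the
    # tracebacks above. Assumes oslo.utils is installed; PortBindingFailed here
    # is a local stand-in for nova.exception.PortBindingFailed.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding through the port's binding:vif_type
        # attribute; 'binding_failed' is assumed here as the sentinel value.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_ports_for_instance(ports):
        created = []
        for port in ports:
            try:
                ensure_no_port_binding_failure(port)
                created.append(port['id'])
            except Exception:
                # Re-raise the original exception after any cleanup; this is
                # the excutils.save_and_reraise_exception() frame that appears
                # in the traceback above.
                with excutils.save_and_reraise_exception():
                    pass  # cleanup of already-created ports would go here
        return created

Calling update_ports_for_instance([{'id': '51939b60-617f-4b25-93a9-b5ccbdda2671', 'binding:vif_type': 'binding_failed'}]) raises PortBindingFailed with the same message format recorded in the log.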
[ 666.760236] env[61974]: Removing descriptor: 20 [ 666.769644] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.769890] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.770065] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.770252] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.770399] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.770600] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.772535] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.772535] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.772535] env[61974]: DEBUG nova.virt.hardware [None 
req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.772782] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.772782] env[61974]: DEBUG nova.virt.hardware [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.774221] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf90b82-5add-443f-acb6-5b59921fb1a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.782916] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce5dbd2-5cd4-4ee8-9dfa-a27aab8574c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.796893] env[61974]: ERROR nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. 
[ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Traceback (most recent call last): [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] yield resources [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.driver.spawn(context, instance, image_meta, [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] vm_ref = self.build_virtual_machine(instance, [ 666.796893] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] for vif in network_info: [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return self._sync_wrapper(fn, *args, **kwargs) [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.wait() [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self[:] = self._gt.wait() [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return self._exit_event.wait() [ 666.797168] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 666.797168] env[61974]: ERROR 
nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] current.throw(*self._exc) [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] result = function(*args, **kwargs) [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return func(*args, **kwargs) [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise e [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] nwinfo = self.network_api.allocate_for_instance( [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] created_port_ids = self._update_ports_for_instance( [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] with excutils.save_and_reraise_exception(): [ 666.797413] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.force_reraise() [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise self.value [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] updated_port = self._update_port( [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] _ensure_no_port_binding_failure(port) [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise exception.PortBindingFailed(port_id=port['id']) [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. [ 666.797663] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] [ 666.797663] env[61974]: INFO nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Terminating instance [ 666.799114] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.018434] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.063877] env[61974]: DEBUG nova.network.neutron [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.149030] env[61974]: DEBUG nova.network.neutron [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.259045] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.372492] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.635577] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec21620-5659-4415-9605-204bee77060e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.645529] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549694ed-6778-49b5-8790-9f0117c6d96b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.651238] env[61974]: DEBUG oslo_concurrency.lockutils [req-2e263e32-d226-4f81-ad20-62dd85f31e57 req-c81f84a0-b13e-4609-8801-b4a8126e6e6a service nova] Releasing lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.679766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquired lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.679991] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.682753] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba5807f-3f52-4cc1-9c1b-a0278d1324e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.692528] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6026fe-ff4b-4c0e-be5c-8e35d7c0877f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.707839] env[61974]: DEBUG nova.compute.provider_tree [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.879814] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Releasing lock "refresh_cache-622aca09-aab9-4e93-b4d3-621d33df7903" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.879814] env[61974]: DEBUG nova.compute.manager [None 
req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 667.879814] env[61974]: DEBUG nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.879814] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.897991] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.028291] env[61974]: DEBUG nova.compute.manager [None req-131794fa-60c7-4589-9fa2-c589c9c43eb4 tempest-ServerDiagnosticsV248Test-2108047383 tempest-ServerDiagnosticsV248Test-2108047383-project-admin] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 668.032028] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609f283f-4519-4008-bfd3-0874986b4da9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.043771] env[61974]: INFO nova.compute.manager [None req-131794fa-60c7-4589-9fa2-c589c9c43eb4 tempest-ServerDiagnosticsV248Test-2108047383 tempest-ServerDiagnosticsV248Test-2108047383-project-admin] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Retrieving diagnostics [ 668.044741] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792c8a0f-9676-4a76-a425-8ceacb851ad0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.201882] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.213949] env[61974]: DEBUG nova.scheduler.client.report [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 668.343381] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.402165] env[61974]: DEBUG nova.network.neutron [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.724994] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.992s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.724994] env[61974]: ERROR nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. 
[ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Traceback (most recent call last): [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.driver.spawn(context, instance, image_meta, [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.724994] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] vm_ref = self.build_virtual_machine(instance, [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] for vif in network_info: [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self._sync_wrapper(fn, *args, **kwargs) [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.wait() [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self[:] = self._gt.wait() [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self._exit_event.wait() [ 668.725316] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] result = hub.switch() [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return self.greenlet.switch() [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] result = function(*args, **kwargs) [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] return func(*args, **kwargs) [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise e [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] nwinfo = self.network_api.allocate_for_instance( [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.725621] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] created_port_ids = self._update_ports_for_instance( [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] with excutils.save_and_reraise_exception(): [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] self.force_reraise() [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise self.value [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] updated_port = self._update_port( [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] _ensure_no_port_binding_failure(port) [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 668.725886] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] raise exception.PortBindingFailed(port_id=port['id']) [ 668.726129] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] nova.exception.PortBindingFailed: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. [ 668.726129] env[61974]: ERROR nova.compute.manager [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] [ 668.726129] env[61974]: DEBUG nova.compute.utils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 668.726129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.153s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.730903] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Build of instance ef64bb0a-d462-4218-9ddf-7c019727f2ba was re-scheduled: Binding failed for port 7167262b-7aaa-416e-9df6-32a4d288a9a7, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 668.731360] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 668.731684] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquiring lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.731770] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Acquired lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.731869] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.739261] env[61974]: DEBUG nova.compute.manager [req-2b6cb61b-02a8-4baf-9ed3-e8c73f7b426f req-e8fad8a6-f861-49a5-9522-6efc1828f8b6 service nova] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Received event network-vif-deleted-51939b60-617f-4b25-93a9-b5ccbdda2671 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 668.847102] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Releasing lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.847102] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 668.847236] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.847512] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09607968-8be5-442f-b6b4-55dd33d104c5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.856870] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817a2faf-2d0a-4860-adbb-83811b447549 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.879649] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59c238dd-10f0-437c-a794-79bc87f05f2e could not be found. [ 668.879889] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.880085] env[61974]: INFO nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 668.880340] env[61974]: DEBUG oslo.service.loopingcall [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.880574] env[61974]: DEBUG nova.compute.manager [-] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 668.880670] env[61974]: DEBUG nova.network.neutron [-] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 668.902132] env[61974]: DEBUG nova.network.neutron [-] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.905923] env[61974]: INFO nova.compute.manager [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] [instance: 622aca09-aab9-4e93-b4d3-621d33df7903] Took 1.03 seconds to deallocate network for instance. 
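The "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines that recur through this section (for "compute_resources", "refresh_cache-<uuid>", and the per-instance locks) are emitted by oslo_concurrency.lockutils. The sketch below shows roughly how such a named lock serializes resource-claim updates within one process; it is an assumption-laden example, not Nova code, and the names claim_resources, abort_claim, and COMPUTE_RESOURCES_LOCK are invented for illustration.

    # Illustrative sketch (not Nova source): serializing work on the same
    # named lock with oslo_concurrency.lockutils, which produces the
    # acquire/release DEBUG lines seen in the log.
    from oslo_concurrency import lockutils

    COMPUTE_RESOURCES_LOCK = "compute_resources"


    @lockutils.synchronized(COMPUTE_RESOURCES_LOCK)
    def claim_resources(tracker, instance_uuid, vcpus, memory_mb):
        # Only one greenthread at a time may mutate the tracker, which is why
        # abort_instance_claim in the log reports waiting (e.g. 12.351s) for
        # this lock while another claim holds it.
        tracker[instance_uuid] = {"vcpus": vcpus, "memory_mb": memory_mb}


    def abort_claim(tracker, instance_uuid):
        # The context-manager form is equivalent to the decorator above and
        # targets the same in-process lock name.
        with lockutils.lock(COMPUTE_RESOURCES_LOCK):
            tracker.pop(instance_uuid, None)

Both forms block until the named lock is free, so the "waited N.NNNs" and "held N.NNNs" figures in the log measure contention on these shared sections.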
[ 669.260206] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.280788] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.281041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.281270] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "41fccade-6e5f-4642-8889-2ce00dbff1c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.281461] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.281630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.284080] env[61974]: INFO nova.compute.manager [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Terminating instance [ 669.288415] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.288676] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 
tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquired lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.288859] env[61974]: DEBUG nova.network.neutron [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 669.402622] env[61974]: DEBUG nova.network.neutron [-] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.410385] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.662634] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9731412f-bfa3-4664-8868-bc730b253aac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.670857] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0097d6a5-6382-40e5-91f7-918dc3b7be3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.702026] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979f39a3-e94e-4aaf-ad0f-b63ea55bd23e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.709655] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40754283-f3c3-44d5-b97c-a4ca35e63a80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.725038] env[61974]: DEBUG nova.compute.provider_tree [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.807877] env[61974]: DEBUG nova.network.neutron [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.891141] env[61974]: DEBUG nova.network.neutron [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.905025] env[61974]: INFO nova.compute.manager [-] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Took 1.02 seconds to deallocate network for instance. [ 669.908254] env[61974]: DEBUG nova.compute.claims [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 669.908431] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.911130] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Releasing lock "refresh_cache-ef64bb0a-d462-4218-9ddf-7c019727f2ba" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.911336] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 669.911518] env[61974]: DEBUG nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 669.911722] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.929119] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.947402] env[61974]: INFO nova.scheduler.client.report [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Deleted allocations for instance 622aca09-aab9-4e93-b4d3-621d33df7903 [ 670.228016] env[61974]: DEBUG nova.scheduler.client.report [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.394206] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Releasing lock "refresh_cache-41fccade-6e5f-4642-8889-2ce00dbff1c7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.397090] env[61974]: DEBUG nova.compute.manager [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 670.397090] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.397090] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d2ff07-0816-41af-9aab-6b8b44a0a809 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.405080] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 670.405817] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39ce6807-24fe-4a50-8b27-f5ff52720a5c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.414121] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 670.414121] env[61974]: value = "task-1378895" [ 670.414121] env[61974]: _type = "Task" [ 670.414121] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.420548] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.432444] env[61974]: DEBUG nova.network.neutron [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.458729] env[61974]: DEBUG oslo_concurrency.lockutils [None req-086201a2-b7ef-4427-ab34-3979ca9133c5 tempest-ServersWithSpecificFlavorTestJSON-855968489 tempest-ServersWithSpecificFlavorTestJSON-855968489-project-member] Lock "622aca09-aab9-4e93-b4d3-621d33df7903" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.437s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.734197] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.008s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.734197] env[61974]: ERROR nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. 
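The PortBindingFailed summarized above (and traced in full below) comes from a small guard in Nova's Neutron integration: after creating or updating a port, Nova inspects the binding:vif_type value that Neutron returned, and the sentinel value "binding_failed" is converted into the exception. A minimal, self-contained paraphrase of that check follows; the exception class defined here is a stand-in for nova.exception.PortBindingFailed, not the exact Nova source.

    # Paraphrase of the guard referenced at nova/network/neutron.py:294 in the
    # traceback below. PortBindingFailed here is a local stand-in.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron signals an unbindable port by setting binding:vif_type to
        # "binding_failed" in the port body rather than returning an error.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

Because the greenthread that allocates networking re-raises this inside save_and_reraise_exception(), the failure only surfaces once spawn() iterates network_info, which is exactly the shape of the traceback that follows.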
[ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] Traceback (most recent call last): [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.driver.spawn(context, instance, image_meta, [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 670.734197] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] vm_ref = self.build_virtual_machine(instance, [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] vif_infos = vmwarevif.get_vif_info(self._session, [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] for vif in network_info: [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self._sync_wrapper(fn, *args, **kwargs) [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.wait() [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self[:] = self._gt.wait() [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self._exit_event.wait() [ 670.734459] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] result = hub.switch() [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return self.greenlet.switch() [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] result = function(*args, **kwargs) [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] return func(*args, **kwargs) [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise e [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] nwinfo = self.network_api.allocate_for_instance( [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 670.734715] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] created_port_ids = self._update_ports_for_instance( [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] with excutils.save_and_reraise_exception(): [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] self.force_reraise() [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise self.value [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] updated_port = self._update_port( [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] _ensure_no_port_binding_failure(port) [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 670.734968] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] raise exception.PortBindingFailed(port_id=port['id']) [ 670.735273] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] nova.exception.PortBindingFailed: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. [ 670.735273] env[61974]: ERROR nova.compute.manager [instance: 84448f61-d302-428f-b995-e942e27c39fd] [ 670.735273] env[61974]: DEBUG nova.compute.utils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 670.736720] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.465s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.740133] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Build of instance 84448f61-d302-428f-b995-e942e27c39fd was re-scheduled: Binding failed for port 81241c22-96f1-4162-9b50-5dcc59b8615c, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 670.740649] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 670.741337] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquiring lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.741586] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Acquired lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.742241] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.929029] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378895, 'name': PowerOffVM_Task, 'duration_secs': 0.193087} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.929029] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 670.929029] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 670.929029] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa678082-db14-4171-8ef7-f415e16c8d67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.934387] env[61974]: INFO nova.compute.manager [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] [instance: ef64bb0a-d462-4218-9ddf-7c019727f2ba] Took 1.02 seconds to deallocate network for instance. 
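The PowerOffVM_Task above illustrates the oslo.vmware task pattern: the SOAP call returns only a task reference, and wait_for_task/_poll_task then poll its progress until it reaches a terminal state (here it went from 0% to completed in about 0.19s). A rough, self-contained sketch of that poll-until-done loop is below; fetch_task_state and the interval/timeout parameters are placeholders for illustration, not the oslo.vmware API.

    import time

    def wait_for_task(fetch_task_state, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds or errors.

        fetch_task_state() is assumed to return a (state, progress, error)
        tuple, e.g. ('running', 40, None) or ('success', 100, None).
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress, error = fetch_task_state()
            print(f"progress is {progress}%")  # mirrors the _poll_task DEBUG lines
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError(f"task failed: {error}")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            time.sleep(interval)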
[ 670.953992] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 670.954256] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 670.954459] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleting the datastore file [datastore1] 41fccade-6e5f-4642-8889-2ce00dbff1c7 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 670.954723] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59bfb804-cbd4-4fdf-9863-f85a1edda4b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.961439] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for the task: (returnval){ [ 670.961439] env[61974]: value = "task-1378897" [ 670.961439] env[61974]: _type = "Task" [ 670.961439] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.961742] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 670.973780] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378897, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.275010] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.333183] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.498949] env[61974]: DEBUG oslo_vmware.api [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Task: {'id': task-1378897, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105319} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.498949] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 671.499370] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 671.499370] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.499620] env[61974]: INFO nova.compute.manager [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Took 1.10 seconds to destroy the instance on the hypervisor. [ 671.500433] env[61974]: DEBUG oslo.service.loopingcall [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.500709] env[61974]: DEBUG nova.compute.manager [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.501786] env[61974]: DEBUG nova.network.neutron [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.509157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.531703] env[61974]: DEBUG nova.network.neutron [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.755979] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25b61d2-d21d-48dc-a135-65c6700e9bcc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.764182] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d569e178-0805-4a39-85a3-8b446b147989 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.797547] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d43558-399b-4926-b761-569bdb30753d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.809569] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ef91d6-cdb8-48bc-8249-9ab1954c49c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.823442] env[61974]: DEBUG nova.compute.provider_tree [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.838329] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Releasing lock "refresh_cache-84448f61-d302-428f-b995-e942e27c39fd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.838573] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 671.838741] env[61974]: DEBUG nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.838901] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.865285] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.966829] env[61974]: INFO nova.scheduler.client.report [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Deleted allocations for instance ef64bb0a-d462-4218-9ddf-7c019727f2ba [ 672.037265] env[61974]: DEBUG nova.network.neutron [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.327296] env[61974]: DEBUG nova.scheduler.client.report [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.367756] env[61974]: DEBUG nova.network.neutron [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.480793] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e1577b1-60d6-467e-b3d4-3f3b8c6d755f tempest-VolumesAssistedSnapshotsTest-617180649 tempest-VolumesAssistedSnapshotsTest-617180649-project-member] Lock "ef64bb0a-d462-4218-9ddf-7c019727f2ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.377s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.543555] env[61974]: INFO nova.compute.manager [-] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Took 1.04 seconds to deallocate network for instance. 
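The repeated "Inventory has not changed" lines restate the provider's full inventory. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit caps any single allocation (no one instance can take more than 16 VCPU or 178 GB of disk on this node). A small worked example using the exact figures from the log; the capacity() helper is ours for illustration, not a Placement API.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Usable capacity as Placement computes it when selecting candidates.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the 4.0 VCPU allocation ratio lets this 48-core host advertise 192 schedulable vCPUs, while memory and disk are not oversubscribed.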
[ 672.833203] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.097s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.833886] env[61974]: ERROR nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Traceback (most recent call last): [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.driver.spawn(context, instance, image_meta, [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] vm_ref = self.build_virtual_machine(instance, [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.833886] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] for vif in network_info: [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self._sync_wrapper(fn, *args, **kwargs) [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.wait() [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self[:] = 
self._gt.wait() [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self._exit_event.wait() [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] result = hub.switch() [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.834139] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return self.greenlet.switch() [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] result = function(*args, **kwargs) [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] return func(*args, **kwargs) [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise e [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] nwinfo = self.network_api.allocate_for_instance( [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] created_port_ids = self._update_ports_for_instance( [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] with excutils.save_and_reraise_exception(): [ 672.834429] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] self.force_reraise() [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.834687] env[61974]: ERROR nova.compute.manager 
[instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise self.value [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] updated_port = self._update_port( [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] _ensure_no_port_binding_failure(port) [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] raise exception.PortBindingFailed(port_id=port['id']) [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] nova.exception.PortBindingFailed: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. [ 672.834687] env[61974]: ERROR nova.compute.manager [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] [ 672.834922] env[61974]: DEBUG nova.compute.utils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.840093] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.364s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.843894] env[61974]: INFO nova.compute.claims [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.852020] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Build of instance 32b1f31b-1e2e-4f53-8e97-265f79a74899 was re-scheduled: Binding failed for port 137cbe69-c6b7-49c8-9037-a0fc85f4f4c1, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.852020] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.852020] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquiring lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.852020] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Acquired lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.852390] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.872382] env[61974]: INFO nova.compute.manager [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] [instance: 84448f61-d302-428f-b995-e942e27c39fd] Took 1.03 seconds to deallocate network for instance. [ 672.982556] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 673.055191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.376757] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.486024] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.517518] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.914900] env[61974]: INFO nova.scheduler.client.report [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Deleted allocations for instance 84448f61-d302-428f-b995-e942e27c39fd [ 673.993831] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Releasing lock "refresh_cache-32b1f31b-1e2e-4f53-8e97-265f79a74899" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.993831] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.993831] env[61974]: DEBUG nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.993831] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 674.024788] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.298786] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9269510-da21-4533-a4b9-39e7922c79de {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.307341] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37a0c9a-beb1-4f75-991a-57f8c43f656d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.337827] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae54f6f-3af6-43d4-af1e-b3d8d3f1fe4c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.345458] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e6bcc7-62ca-4339-912c-375ca290462e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.960053] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91a3fc0a-376f-4cc1-b21a-50b31de95c4b tempest-TenantUsagesTestJSON-526315272 tempest-TenantUsagesTestJSON-526315272-project-member] Lock "84448f61-d302-428f-b995-e942e27c39fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.039s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.960737] env[61974]: DEBUG nova.network.neutron [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.973110] env[61974]: DEBUG nova.compute.provider_tree [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.463031] env[61974]: INFO nova.compute.manager [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] [instance: 32b1f31b-1e2e-4f53-8e97-265f79a74899] Took 1.47 seconds to deallocate network for instance. [ 675.478598] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 675.481676] env[61974]: DEBUG nova.scheduler.client.report [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.993128] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.153s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.993506] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.000160] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.838s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.020268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.504485] env[61974]: DEBUG nova.compute.utils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.510106] env[61974]: INFO nova.scheduler.client.report [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Deleted allocations for instance 32b1f31b-1e2e-4f53-8e97-265f79a74899 [ 676.515496] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 676.515639] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.637821] env[61974]: DEBUG nova.policy [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ed05af1515b4bc1ac2eb63c138237cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c13c93c88665482b94f62e760f4c4e5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 677.004255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a35f278-ebd3-4ce7-872c-a6705b8ceff9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.013110] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958806f1-8ae7-4bd1-9aff-566b2c8bc4e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.016666] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.019772] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dbcf0672-65d5-49c8-a580-119f5e547652 tempest-InstanceActionsV221TestJSON-946819549 tempest-InstanceActionsV221TestJSON-946819549-project-member] Lock "32b1f31b-1e2e-4f53-8e97-265f79a74899" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.129s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.076716] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 677.080747] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db971cf5-f4af-4f56-a636-57ef30997663 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.092305] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782cd74e-22ec-453d-8f14-b8b90a2c9cb0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.108567] env[61974]: DEBUG nova.compute.provider_tree [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.182886] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Successfully created port: ab348f2c-e70a-4e4e-a612-4632d1f93c33 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.603806] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.613843] env[61974]: DEBUG nova.scheduler.client.report [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.027863] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.054522] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 678.054779] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.054935] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 678.055136] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.055284] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 678.055430] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 678.055675] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 678.055787] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 678.055946] env[61974]: DEBUG nova.virt.hardware [None 
req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 678.056117] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 678.056340] env[61974]: DEBUG nova.virt.hardware [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 678.057240] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abb4191-e0d7-40aa-9925-526e86e7ee30 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.065621] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2312716-a7c3-4006-ae56-c191d540c415 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.122502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.124s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.123150] env[61974]: ERROR nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. 
[ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Traceback (most recent call last): [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.driver.spawn(context, instance, image_meta, [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] vm_ref = self.build_virtual_machine(instance, [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.123150] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] for vif in network_info: [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self._sync_wrapper(fn, *args, **kwargs) [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.wait() [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self[:] = self._gt.wait() [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self._exit_event.wait() [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] result = hub.switch() [ 678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
678.123468] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return self.greenlet.switch() [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] result = function(*args, **kwargs) [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] return func(*args, **kwargs) [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise e [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] nwinfo = self.network_api.allocate_for_instance( [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] created_port_ids = self._update_ports_for_instance( [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] with excutils.save_and_reraise_exception(): [ 678.123868] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] self.force_reraise() [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise self.value [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] updated_port = self._update_port( [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] _ensure_no_port_binding_failure(port) [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] raise exception.PortBindingFailed(port_id=port['id']) [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] nova.exception.PortBindingFailed: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. [ 678.124211] env[61974]: ERROR nova.compute.manager [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] [ 678.124496] env[61974]: DEBUG nova.compute.utils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 678.129025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.673s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.129025] env[61974]: INFO nova.compute.claims [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.131559] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Build of instance 270447a7-ebbf-4671-bc6c-522f23d21788 was re-scheduled: Binding failed for port 2c7e35d9-5de4-4553-8d4e-bc3608ff33fc, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 678.132347] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 678.132909] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquiring lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.133231] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Acquired lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.133687] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.182794] env[61974]: DEBUG nova.compute.manager [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Received event network-changed-ab348f2c-e70a-4e4e-a612-4632d1f93c33 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 678.183046] env[61974]: DEBUG nova.compute.manager [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Refreshing instance network info cache due to event network-changed-ab348f2c-e70a-4e4e-a612-4632d1f93c33. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 678.183269] env[61974]: DEBUG oslo_concurrency.lockutils [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] Acquiring lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.183723] env[61974]: DEBUG oslo_concurrency.lockutils [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] Acquired lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.183723] env[61974]: DEBUG nova.network.neutron [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Refreshing network info cache for port ab348f2c-e70a-4e4e-a612-4632d1f93c33 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 678.278505] env[61974]: ERROR nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 678.278505] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.278505] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.278505] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.278505] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.278505] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.278505] env[61974]: ERROR nova.compute.manager raise self.value [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.278505] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 678.278505] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.278505] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 678.279145] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.279145] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 678.279145] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 678.279145] env[61974]: ERROR nova.compute.manager [ 678.279145] env[61974]: Traceback (most recent call last): [ 678.279145] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 678.279145] env[61974]: listener.cb(fileno) [ 678.279145] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.279145] env[61974]: result = function(*args, **kwargs) [ 678.279145] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.279145] env[61974]: return func(*args, **kwargs) [ 678.279145] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.279145] env[61974]: raise e [ 678.279145] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.279145] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 678.279145] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.279145] env[61974]: created_port_ids = self._update_ports_for_instance( [ 678.279145] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.279145] env[61974]: with excutils.save_and_reraise_exception(): [ 678.279145] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.279145] env[61974]: self.force_reraise() [ 678.279145] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.279145] env[61974]: raise self.value [ 678.279145] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.279145] env[61974]: updated_port = self._update_port( [ 678.279145] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.279145] env[61974]: _ensure_no_port_binding_failure(port) [ 678.279145] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.279145] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 678.280121] env[61974]: nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 678.280121] env[61974]: Removing descriptor: 20 [ 678.280121] env[61974]: ERROR nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. 
[ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Traceback (most recent call last): [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] yield resources [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.driver.spawn(context, instance, image_meta, [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.280121] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] vm_ref = self.build_virtual_machine(instance, [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] for vif in network_info: [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self._sync_wrapper(fn, *args, **kwargs) [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.wait() [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self[:] = self._gt.wait() [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self._exit_event.wait() [ 678.280489] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 678.280859] env[61974]: ERROR 
nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] result = hub.switch() [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self.greenlet.switch() [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] result = function(*args, **kwargs) [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return func(*args, **kwargs) [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise e [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] nwinfo = self.network_api.allocate_for_instance( [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.280859] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] created_port_ids = self._update_ports_for_instance( [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] with excutils.save_and_reraise_exception(): [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.force_reraise() [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise self.value [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] updated_port = self._update_port( [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.281135] 
env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] _ensure_no_port_binding_failure(port) [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.281135] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise exception.PortBindingFailed(port_id=port['id']) [ 678.281527] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 678.281527] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] [ 678.281527] env[61974]: INFO nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Terminating instance [ 678.281779] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquiring lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.662024] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.872141] env[61974]: DEBUG nova.network.neutron [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.018793] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.152527] env[61974]: DEBUG nova.network.neutron [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.524735] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Releasing lock "refresh_cache-270447a7-ebbf-4671-bc6c-522f23d21788" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.525617] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 679.525817] env[61974]: DEBUG nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.526244] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.558472] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.619007] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832fd74b-9ef7-42dc-9c32-bdfb4e806ac7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.629394] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7c8f9c-407a-47e1-96aa-c62763af2030 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.671205] env[61974]: DEBUG oslo_concurrency.lockutils [req-d271b365-0066-4274-8bf0-d3c95606d079 req-ddbced19-3b0b-43f2-8257-9382c3a66f5c service nova] Releasing lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.671912] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquired lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.672112] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.675499] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437dcbe3-a1a2-4444-9c1c-bb183708642e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.683646] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b61f02-c907-43cd-b432-5329f5828ee0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.699522] env[61974]: DEBUG nova.compute.provider_tree [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.061560] env[61974]: DEBUG nova.network.neutron [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.203213] env[61974]: DEBUG nova.scheduler.client.report [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.211129] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.255448] env[61974]: DEBUG nova.compute.manager [req-9a9a6c64-5ab0-4d93-a554-b73b0e1c473d req-e799c3e0-9816-4b55-b325-5a5bbd470113 service nova] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Received event network-vif-deleted-ab348f2c-e70a-4e4e-a612-4632d1f93c33 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.440203] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.564804] env[61974]: INFO nova.compute.manager [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] [instance: 270447a7-ebbf-4671-bc6c-522f23d21788] Took 1.04 seconds to deallocate network for instance. [ 680.712049] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.712762] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 680.717948] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.944s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.719854] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.002s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.720403] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 680.720656] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.138s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.727954] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3550d8-9d4d-4a56-9077-1cfc158c155c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.736102] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801e00af-a64c-4295-a041-28cc27e5d1c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.752688] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc62a7d-b086-422e-9d2b-da54d000989e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.762189] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea904c3-122a-44da-b02d-b3473c91e6ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.799430] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181458MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 680.799688] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.944470] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] 
Releasing lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.944470] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 680.944470] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 680.944470] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c0af0a9-7eb0-460b-a526-baff3008014e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.955908] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40359420-da72-432a-9177-33e538709368 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.982229] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0 could not be found. [ 680.982484] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.982674] env[61974]: INFO nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 680.982920] env[61974]: DEBUG oslo.service.loopingcall [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.983216] env[61974]: DEBUG nova.compute.manager [-] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.983333] env[61974]: DEBUG nova.network.neutron [-] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.012128] env[61974]: DEBUG nova.network.neutron [-] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.219107] env[61974]: DEBUG nova.compute.utils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.220710] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 681.220896] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.323747] env[61974]: DEBUG nova.policy [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934312112100440d8bbc689166b9d691', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fccd6a0b4bcf4e778822d7bb88fcc8bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 681.515251] env[61974]: DEBUG nova.network.neutron [-] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.610276] env[61974]: INFO nova.scheduler.client.report [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Deleted allocations for instance 270447a7-ebbf-4671-bc6c-522f23d21788 [ 681.696049] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616ee759-2725-46bf-a9da-050715a5cef4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.705638] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79e3fd8-36f5-41db-9eeb-649af5931bd0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.739175] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 681.746041] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362d59f7-fd19-47ee-86b2-b48b074243f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.751035] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0387432-ba7b-4915-b96c-55cff8ad7947 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.766709] env[61974]: DEBUG nova.compute.provider_tree [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.864659] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Successfully created port: 5db9f7dd-b982-484c-9b67-73d7df3b7601 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.023348] env[61974]: INFO nova.compute.manager [-] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Took 1.04 seconds to deallocate network for instance. [ 682.026127] env[61974]: DEBUG nova.compute.claims [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 682.026127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.119203] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0df479ec-5b85-40d2-b956-ed82925e477b tempest-DeleteServersAdminTestJSON-1226004970 tempest-DeleteServersAdminTestJSON-1226004970-project-member] Lock "270447a7-ebbf-4671-bc6c-522f23d21788" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.082s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.270140] env[61974]: DEBUG nova.scheduler.client.report [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.623949] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 682.753106] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 682.775537] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.055s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.776587] env[61974]: ERROR nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Traceback (most recent call last): [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.driver.spawn(context, instance, image_meta, [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] vm_ref = self.build_virtual_machine(instance, [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] vif_infos = vmwarevif.get_vif_info(self._session, [ 682.776587] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] for vif in network_info: [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] 
File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return self._sync_wrapper(fn, *args, **kwargs) [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.wait() [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self[:] = self._gt.wait() [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return self._exit_event.wait() [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] current.throw(*self._exc) [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 682.776846] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] result = function(*args, **kwargs) [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] return func(*args, **kwargs) [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise e [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] nwinfo = self.network_api.allocate_for_instance( [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] created_port_ids = self._update_ports_for_instance( [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] with excutils.save_and_reraise_exception(): [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] self.force_reraise() [ 682.777338] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise self.value [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] updated_port = self._update_port( [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] _ensure_no_port_binding_failure(port) [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] raise exception.PortBindingFailed(port_id=port['id']) [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] nova.exception.PortBindingFailed: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. [ 682.777637] env[61974]: ERROR nova.compute.manager [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] [ 682.777637] env[61974]: DEBUG nova.compute.utils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 682.779129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.761s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.782229] env[61974]: INFO nova.compute.claims [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.785810] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Build of instance 1c7edeed-2fa7-4662-9994-21708dcb3efd was re-scheduled: Binding failed for port 4213fc5e-5fe2-4688-a8c5-65efea292464, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 682.786165] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 682.786258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.786975] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquired lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.786975] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.803689] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.804107] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.806349] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.808149] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Flavor pref 0:0:0 {{(pid=61974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.808149] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.808149] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.808149] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.808149] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.810571] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.810571] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.810571] env[61974]: DEBUG nova.virt.hardware [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.810657] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e945461f-de92-4d91-bb3c-a3f4c516f0d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.819980] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8073013-5e33-41cd-9b30-4ccea474e7c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.158159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.323875] env[61974]: DEBUG nova.network.neutron [None 
req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.520242] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.814433] env[61974]: DEBUG nova.compute.manager [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Received event network-changed-5db9f7dd-b982-484c-9b67-73d7df3b7601 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 683.814538] env[61974]: DEBUG nova.compute.manager [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Refreshing instance network info cache due to event network-changed-5db9f7dd-b982-484c-9b67-73d7df3b7601. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 683.814827] env[61974]: DEBUG oslo_concurrency.lockutils [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] Acquiring lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.814915] env[61974]: DEBUG oslo_concurrency.lockutils [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] Acquired lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.815174] env[61974]: DEBUG nova.network.neutron [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Refreshing network info cache for port 5db9f7dd-b982-484c-9b67-73d7df3b7601 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.023204] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Releasing lock "refresh_cache-1c7edeed-2fa7-4662-9994-21708dcb3efd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.024342] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 684.024531] env[61974]: DEBUG nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.024697] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.055187] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.108442] env[61974]: ERROR nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 684.108442] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.108442] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.108442] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.108442] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.108442] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.108442] env[61974]: ERROR nova.compute.manager raise self.value [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.108442] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 684.108442] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.108442] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.108882] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.108882] env[61974]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 684.108882] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 684.108882] env[61974]: ERROR nova.compute.manager [ 684.108882] env[61974]: Traceback (most recent call last): [ 684.108882] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.108882] env[61974]: listener.cb(fileno) [ 684.108882] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.108882] env[61974]: result = function(*args, **kwargs) [ 684.108882] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.108882] env[61974]: return func(*args, **kwargs) [ 684.108882] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.108882] env[61974]: raise e [ 684.108882] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.108882] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 684.108882] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.108882] env[61974]: created_port_ids = self._update_ports_for_instance( [ 684.108882] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.108882] env[61974]: with excutils.save_and_reraise_exception(): [ 684.108882] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.108882] env[61974]: self.force_reraise() [ 684.108882] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.108882] env[61974]: raise self.value [ 684.108882] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.108882] env[61974]: updated_port = self._update_port( [ 684.108882] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.108882] env[61974]: _ensure_no_port_binding_failure(port) [ 684.108882] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.108882] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.109506] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 684.109506] env[61974]: Removing descriptor: 21 [ 684.109506] env[61974]: ERROR nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. 
[ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Traceback (most recent call last): [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] yield resources [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.driver.spawn(context, instance, image_meta, [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 684.109506] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] vm_ref = self.build_virtual_machine(instance, [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] vif_infos = vmwarevif.get_vif_info(self._session, [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] for vif in network_info: [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self._sync_wrapper(fn, *args, **kwargs) [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.wait() [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self[:] = self._gt.wait() [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self._exit_event.wait() [ 684.109777] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 684.110144] env[61974]: ERROR 
nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] result = hub.switch() [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self.greenlet.switch() [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] result = function(*args, **kwargs) [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return func(*args, **kwargs) [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise e [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] nwinfo = self.network_api.allocate_for_instance( [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.110144] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] created_port_ids = self._update_ports_for_instance( [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] with excutils.save_and_reraise_exception(): [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.force_reraise() [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise self.value [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] updated_port = self._update_port( [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.110440] 
env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] _ensure_no_port_binding_failure(port) [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.110440] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise exception.PortBindingFailed(port_id=port['id']) [ 684.110703] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 684.110703] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] [ 684.110703] env[61974]: INFO nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Terminating instance [ 684.112033] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.258202] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695008e6-b915-42b7-8a1a-8dd8dd1adf91 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.266721] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48fce55-6848-4eb3-b78d-cc0c086e0b92 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.304278] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6101b8d-2621-48a3-a29f-a9f7f4b2f041 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.312566] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6488af70-2843-4ad5-82ba-e16d5701f73f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.331298] env[61974]: DEBUG nova.compute.provider_tree [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.358695] env[61974]: DEBUG nova.network.neutron [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.457198] env[61974]: DEBUG nova.network.neutron [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.557947] env[61974]: DEBUG nova.network.neutron [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.836430] env[61974]: DEBUG nova.scheduler.client.report [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.960422] env[61974]: DEBUG oslo_concurrency.lockutils [req-86a28c91-1024-4a7d-8e11-efba3dc28f5c req-1ed621c4-9ff6-4d66-b4f3-17c52e9136dc service nova] Releasing lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.962126] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquired lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.962126] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.061593] env[61974]: INFO nova.compute.manager [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: 1c7edeed-2fa7-4662-9994-21708dcb3efd] Took 1.04 seconds to deallocate network for instance. 
[ 685.345418] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.345956] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 685.348791] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.440s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.486229] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.653086] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.853515] env[61974]: DEBUG nova.compute.utils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 685.858296] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 685.858484] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 685.927025] env[61974]: DEBUG nova.policy [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 686.104574] env[61974]: DEBUG nova.compute.manager [req-b2477667-01c3-4382-b383-45d9d8bedd96 req-a15343ba-83c9-466b-973b-c8e9ac14a813 service nova] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Received event network-vif-deleted-5db9f7dd-b982-484c-9b67-73d7df3b7601 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 686.109856] env[61974]: INFO nova.scheduler.client.report [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Deleted allocations for instance 1c7edeed-2fa7-4662-9994-21708dcb3efd [ 686.156718] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Releasing lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.157190] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 686.157380] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.159033] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dab055a-3a23-4f42-962c-808c3bfc22e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.175244] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89379143-069f-4189-8457-1df8df119605 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.205234] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9d599717-0bda-4996-89d8-c41ce089eaac could not be found. [ 686.205485] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.205684] env[61974]: INFO nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Took 0.05 seconds to destroy the instance on the hypervisor. [ 686.205936] env[61974]: DEBUG oslo.service.loopingcall [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.209354] env[61974]: DEBUG nova.compute.manager [-] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.209576] env[61974]: DEBUG nova.network.neutron [-] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.245422] env[61974]: DEBUG nova.network.neutron [-] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.271160] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquiring lock "2b74ee60-ce70-429a-9ccb-1f96c236cf8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.271458] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "2b74ee60-ce70-429a-9ccb-1f96c236cf8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.315042] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2719a282-e827-4c85-8103-3dde30cc0736 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.322064] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2642ed42-5e94-44a0-8610-0e859cda3cac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.358838] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 686.362171] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efe7ec4-a06f-49da-b1e9-3ef2fdb674dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.371716] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3da0a7-c485-494c-8307-db85c5b72aa0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.389101] env[61974]: DEBUG nova.compute.provider_tree [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.481435] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Successfully created port: 95b37ae0-fbac-4f29-962e-50cd9dfdca8b {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.623023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-81740189-97ab-4baf-8fc4-478de96dd0dc tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "1c7edeed-2fa7-4662-9994-21708dcb3efd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.206s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.748545] env[61974]: DEBUG nova.network.neutron [-] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.894342] env[61974]: DEBUG nova.scheduler.client.report [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.127076] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 687.252133] env[61974]: INFO nova.compute.manager [-] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Took 1.04 seconds to deallocate network for instance. 
[ 687.255085] env[61974]: DEBUG nova.compute.claims [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 687.255307] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.372114] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 687.405919] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.406184] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.406339] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.406866] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.406866] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.410285] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 
tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.410285] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.410285] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.410285] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.410285] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.410435] env[61974]: DEBUG nova.virt.hardware [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.410435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.411079] env[61974]: ERROR nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. 
[ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Traceback (most recent call last): [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.driver.spawn(context, instance, image_meta, [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] vm_ref = self.build_virtual_machine(instance, [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] vif_infos = vmwarevif.get_vif_info(self._session, [ 687.411079] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] for vif in network_info: [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return self._sync_wrapper(fn, *args, **kwargs) [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.wait() [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self[:] = self._gt.wait() [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return self._exit_event.wait() [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] current.throw(*self._exc) [ 687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
687.411450] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] result = function(*args, **kwargs) [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] return func(*args, **kwargs) [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise e [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] nwinfo = self.network_api.allocate_for_instance( [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] created_port_ids = self._update_ports_for_instance( [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] with excutils.save_and_reraise_exception(): [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] self.force_reraise() [ 687.411730] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise self.value [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] updated_port = self._update_port( [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] _ensure_no_port_binding_failure(port) [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] raise exception.PortBindingFailed(port_id=port['id']) [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] nova.exception.PortBindingFailed: Binding failed for 
port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. [ 687.412016] env[61974]: ERROR nova.compute.manager [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] [ 687.412016] env[61974]: DEBUG nova.compute.utils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 687.413186] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9e8729-2f67-49f8-b920-1ad6434acf67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.415977] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Build of instance 59c238dd-10f0-437c-a794-79bc87f05f2e was re-scheduled: Binding failed for port 51939b60-617f-4b25-93a9-b5ccbdda2671, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 687.416423] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 687.416682] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.416942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquired lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.417133] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.418721] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.909s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.420106] env[61974]: INFO nova.compute.claims [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 
tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.430167] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e14fac-387f-4fb3-9e7b-7109299fade8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.652885] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.813584] env[61974]: ERROR nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. [ 687.813584] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.813584] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.813584] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.813584] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.813584] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.813584] env[61974]: ERROR nova.compute.manager raise self.value [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.813584] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 687.813584] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.813584] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 687.813964] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.813964] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 687.813964] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. 
[ 687.813964] env[61974]: ERROR nova.compute.manager [ 687.813964] env[61974]: Traceback (most recent call last): [ 687.813964] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 687.813964] env[61974]: listener.cb(fileno) [ 687.813964] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.813964] env[61974]: result = function(*args, **kwargs) [ 687.813964] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.813964] env[61974]: return func(*args, **kwargs) [ 687.813964] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.813964] env[61974]: raise e [ 687.813964] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.813964] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 687.813964] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.813964] env[61974]: created_port_ids = self._update_ports_for_instance( [ 687.813964] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.813964] env[61974]: with excutils.save_and_reraise_exception(): [ 687.813964] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.813964] env[61974]: self.force_reraise() [ 687.813964] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.813964] env[61974]: raise self.value [ 687.813964] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.813964] env[61974]: updated_port = self._update_port( [ 687.813964] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.813964] env[61974]: _ensure_no_port_binding_failure(port) [ 687.813964] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.813964] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 687.814589] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. [ 687.814589] env[61974]: Removing descriptor: 21 [ 687.814589] env[61974]: ERROR nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. 
[ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Traceback (most recent call last): [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] yield resources [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.driver.spawn(context, instance, image_meta, [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 687.814589] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] vm_ref = self.build_virtual_machine(instance, [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] vif_infos = vmwarevif.get_vif_info(self._session, [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] for vif in network_info: [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self._sync_wrapper(fn, *args, **kwargs) [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.wait() [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self[:] = self._gt.wait() [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self._exit_event.wait() [ 687.814852] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 687.815143] env[61974]: ERROR 
nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] result = hub.switch() [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self.greenlet.switch() [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] result = function(*args, **kwargs) [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return func(*args, **kwargs) [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise e [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] nwinfo = self.network_api.allocate_for_instance( [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 687.815143] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] created_port_ids = self._update_ports_for_instance( [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] with excutils.save_and_reraise_exception(): [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.force_reraise() [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise self.value [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] updated_port = self._update_port( [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.815558] 
env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] _ensure_no_port_binding_failure(port) [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.815558] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise exception.PortBindingFailed(port_id=port['id']) [ 687.815874] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. [ 687.815874] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] [ 687.815874] env[61974]: INFO nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Terminating instance [ 687.817067] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.817067] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.817207] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.946843] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.284847] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.341724] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.368632] env[61974]: DEBUG nova.compute.manager [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Received event network-changed-95b37ae0-fbac-4f29-962e-50cd9dfdca8b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 688.369117] env[61974]: DEBUG nova.compute.manager [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Refreshing instance network info cache due to event network-changed-95b37ae0-fbac-4f29-962e-50cd9dfdca8b. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 688.369337] env[61974]: DEBUG oslo_concurrency.lockutils [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] Acquiring lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.426213] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.787743] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Releasing lock "refresh_cache-59c238dd-10f0-437c-a794-79bc87f05f2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.788189] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 688.788291] env[61974]: DEBUG nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 688.789052] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.812217] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da3dcb4-69d8-429b-bfa4-f329db2940db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.820214] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50a8b04-10f4-47ea-8a2c-affed0dbb919 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.854107] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.856053] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7318d6e0-7432-4861-9aa8-4688fcdcb25d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.865864] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2add2df2-bb36-4d59-845b-204919358217 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.880856] env[61974]: DEBUG nova.compute.provider_tree [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.925922] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.926432] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 688.926671] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.926972] env[61974]: DEBUG oslo_concurrency.lockutils [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] Acquired lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.927156] env[61974]: DEBUG nova.network.neutron [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Refreshing network info cache for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 688.928395] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad1d6835-f089-4167-a478-13b92afb764c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.939997] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b7ca4e-c979-4069-a195-db03e605cfc6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.967299] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a00b16d-8274-4728-920b-a30e95fa4048 could not be found. [ 688.967760] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.968033] env[61974]: INFO nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Took 0.04 seconds to destroy the instance on the hypervisor. [ 688.968342] env[61974]: DEBUG oslo.service.loopingcall [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.968878] env[61974]: DEBUG nova.compute.manager [-] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 688.969018] env[61974]: DEBUG nova.network.neutron [-] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.000360] env[61974]: DEBUG nova.network.neutron [-] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.359926] env[61974]: DEBUG nova.network.neutron [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.384681] env[61974]: DEBUG nova.scheduler.client.report [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 689.465922] env[61974]: DEBUG nova.network.neutron [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.502942] env[61974]: DEBUG nova.network.neutron [-] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.663068] env[61974]: DEBUG nova.network.neutron [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.869817] env[61974]: INFO nova.compute.manager [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 59c238dd-10f0-437c-a794-79bc87f05f2e] Took 1.08 seconds to deallocate network for instance. 
[ 689.891380] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.891506] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 689.895173] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.839s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.895173] env[61974]: DEBUG nova.objects.instance [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lazy-loading 'resources' on Instance uuid 41fccade-6e5f-4642-8889-2ce00dbff1c7 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 690.009415] env[61974]: INFO nova.compute.manager [-] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Took 1.04 seconds to deallocate network for instance. 
[ 690.011365] env[61974]: DEBUG nova.compute.claims [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 690.011709] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.164456] env[61974]: DEBUG oslo_concurrency.lockutils [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] Releasing lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.164797] env[61974]: DEBUG nova.compute.manager [req-5e52d65b-0dcf-4a81-bb40-b2de56ac7008 req-fa84b0bf-edc6-4323-8b5f-82c9da9f083f service nova] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Received event network-vif-deleted-95b37ae0-fbac-4f29-962e-50cd9dfdca8b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 690.400025] env[61974]: DEBUG nova.compute.utils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.405568] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 690.405760] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.474814] env[61974]: DEBUG nova.policy [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed40e49316524f65bf25168ec1b257f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dd7949c7b2540e3b6ba4c9197bb3e8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 690.841325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1796524a-5984-43aa-a0f2-42db12da80f1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.852549] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5707906a-b014-44a8-acbf-ecc249216bba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.891166] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b00868-7471-482d-8dc7-7b32130fd8cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.901796] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f9670d-b0bc-4d44-bda5-26f0f40e0b41 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.910232] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 690.916600] env[61974]: INFO nova.scheduler.client.report [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Deleted allocations for instance 59c238dd-10f0-437c-a794-79bc87f05f2e [ 690.933354] env[61974]: DEBUG nova.compute.provider_tree [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.963248] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Successfully created port: 47bce457-b91f-4d3d-88fb-9fdde6f7944a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.253899] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "b935b7e2-ba4b-452a-9eca-2fad5acc9055" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.254212] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "b935b7e2-ba4b-452a-9eca-2fad5acc9055" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.436575] env[61974]: DEBUG nova.scheduler.client.report [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 691.440441] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88a42d56-7f5f-4d75-85ae-210f3e55a825 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "59c238dd-10f0-437c-a794-79bc87f05f2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 66.699s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.921375] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908
tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 691.942664] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.945249] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 691.950115] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.950367] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.950488] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.950770] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.951405] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.951600] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 
tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.951855] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.952016] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.952200] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.952366] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.952537] env[61974]: DEBUG nova.virt.hardware [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.953052] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.436s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.954977] env[61974]: INFO nova.compute.claims [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.957702] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce83b5c6-383c-419f-ad99-a956e1d995a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.974158] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f63205-7cd5-4128-a80c-aeba94405909 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.999760] env[61974]: INFO nova.scheduler.client.report [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 
tempest-ServerDiagnosticsV248Test-1074983117-project-member] Deleted allocations for instance 41fccade-6e5f-4642-8889-2ce00dbff1c7 [ 692.330055] env[61974]: ERROR nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. [ 692.330055] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.330055] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.330055] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.330055] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.330055] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.330055] env[61974]: ERROR nova.compute.manager raise self.value [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.330055] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 692.330055] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.330055] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 692.330484] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.330484] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 692.330484] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. 
[ 692.330484] env[61974]: ERROR nova.compute.manager [ 692.330484] env[61974]: Traceback (most recent call last): [ 692.330484] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 692.330484] env[61974]: listener.cb(fileno) [ 692.330484] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.330484] env[61974]: result = function(*args, **kwargs) [ 692.330484] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.330484] env[61974]: return func(*args, **kwargs) [ 692.330484] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.330484] env[61974]: raise e [ 692.330484] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.330484] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 692.330484] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.330484] env[61974]: created_port_ids = self._update_ports_for_instance( [ 692.330484] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.330484] env[61974]: with excutils.save_and_reraise_exception(): [ 692.330484] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.330484] env[61974]: self.force_reraise() [ 692.330484] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.330484] env[61974]: raise self.value [ 692.330484] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.330484] env[61974]: updated_port = self._update_port( [ 692.330484] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.330484] env[61974]: _ensure_no_port_binding_failure(port) [ 692.330484] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.330484] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 692.331195] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. [ 692.331195] env[61974]: Removing descriptor: 21 [ 692.331195] env[61974]: ERROR nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. 
[ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Traceback (most recent call last): [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] yield resources [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.driver.spawn(context, instance, image_meta, [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.331195] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] vm_ref = self.build_virtual_machine(instance, [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] for vif in network_info: [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self._sync_wrapper(fn, *args, **kwargs) [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.wait() [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self[:] = self._gt.wait() [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self._exit_event.wait() [ 692.331496] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.331801] env[61974]: ERROR 
nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] result = hub.switch() [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self.greenlet.switch() [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] result = function(*args, **kwargs) [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return func(*args, **kwargs) [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise e [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] nwinfo = self.network_api.allocate_for_instance( [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.331801] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] created_port_ids = self._update_ports_for_instance( [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] with excutils.save_and_reraise_exception(): [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.force_reraise() [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise self.value [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] updated_port = self._update_port( [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.332124] 
env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] _ensure_no_port_binding_failure(port) [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.332124] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise exception.PortBindingFailed(port_id=port['id']) [ 692.332409] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. [ 692.332409] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] [ 692.332409] env[61974]: INFO nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Terminating instance [ 692.335691] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquiring lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.336942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquired lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.336942] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.447088] env[61974]: DEBUG nova.compute.manager [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Received event network-changed-47bce457-b91f-4d3d-88fb-9fdde6f7944a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.447088] env[61974]: DEBUG nova.compute.manager [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Refreshing instance network info cache due to event network-changed-47bce457-b91f-4d3d-88fb-9fdde6f7944a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 692.447088] env[61974]: DEBUG oslo_concurrency.lockutils [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] Acquiring lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.487191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.511771] env[61974]: DEBUG oslo_concurrency.lockutils [None req-86c9c4e5-51b3-4f82-ae55-7a22386f03d6 tempest-ServerDiagnosticsV248Test-1074983117 tempest-ServerDiagnosticsV248Test-1074983117-project-member] Lock "41fccade-6e5f-4642-8889-2ce00dbff1c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.230s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.745753] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "4628f895-1ae5-4d25-8095-f892b86769f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.745931] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "4628f895-1ae5-4d25-8095-f892b86769f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.863120] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.926547] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.422846] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237bba22-cc6b-4fb8-8c0f-f2835a180a84 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.430258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Releasing lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.430772] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.431063] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.432306] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cfb30b-7a3b-4b50-8945-9c940a7fb310 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.437320] env[61974]: DEBUG oslo_concurrency.lockutils [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] Acquired lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.438451] env[61974]: DEBUG nova.network.neutron [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Refreshing network info cache for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 693.439572] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a51958e-d8aa-4762-9923-ac50a8ca5c05 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.477429] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705e8608-1c94-44c7-b3df-1b1d8ff9f0e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.484576] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-28a361aa-dec9-4dc8-8ea6-5a2a2fb3a1cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.505753] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e487b72-90c0-45db-b9ae-dff0edd034f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.515682] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f9690ab-8218-4b2c-ba36-682ea7398209 could not be found. [ 693.515969] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.516216] env[61974]: INFO nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Took 0.09 seconds to destroy the instance on the hypervisor. [ 693.516512] env[61974]: DEBUG oslo.service.loopingcall [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.517218] env[61974]: DEBUG nova.compute.manager [-] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.517419] env[61974]: DEBUG nova.network.neutron [-] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.527704] env[61974]: DEBUG nova.compute.provider_tree [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.537962] env[61974]: DEBUG nova.network.neutron [-] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.613036] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "90fd5720-923c-4243-9f62-908e35fe35a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.613036] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "90fd5720-923c-4243-9f62-908e35fe35a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.724041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquiring lock "e6bc38d5-056f-40c2-a2ed-467200da2738" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.724283] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "e6bc38d5-056f-40c2-a2ed-467200da2738" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.960139] env[61974]: DEBUG nova.network.neutron [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.031251] env[61974]: DEBUG nova.scheduler.client.report [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 694.035846] env[61974]: DEBUG nova.network.neutron [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.041054] env[61974]: DEBUG nova.network.neutron [-] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.476456] env[61974]: DEBUG nova.compute.manager [req-d0ee7100-a569-463e-93cf-c2d915177e24 req-d564aec8-f280-4cc7-bab4-bb5e1714df13 service nova] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Received event network-vif-deleted-47bce457-b91f-4d3d-88fb-9fdde6f7944a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 694.539662] env[61974]: DEBUG oslo_concurrency.lockutils [req-7403a6c2-274f-410e-8c5e-1e881fdf21f2 req-eb7366d4-76c0-4277-b2bf-18bcc501ca97 service nova] Releasing lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.540854] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.541107] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 694.543817] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.524s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.544994] env[61974]: INFO nova.compute.claims [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.547780] env[61974]: INFO nova.compute.manager [-] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Took 1.03 seconds to deallocate network for instance. [ 694.550671] env[61974]: DEBUG nova.compute.claims [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.550747] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.052345] env[61974]: DEBUG nova.compute.utils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 695.059020] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 695.059020] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.124414] env[61974]: DEBUG nova.policy [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e53bba18a479426b8f517a62d7e2cfde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf9a1df54af49f99767ece26bb1a731', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 695.560593] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 695.769362] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Successfully created port: 51a2c2ab-d715-49cf-a9e0-431b116e7551 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.012101] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6089478-ae8f-4172-bd6f-67f87fed7462 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.019676] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da147466-d049-4006-9b2f-022bd31778b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.052916] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c862cbde-095e-4bc4-b3d1-4e319957af06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.064225] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f05ded-1f0a-4e56-8378-2c11c0567317 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.083141] env[61974]: DEBUG nova.compute.provider_tree [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.576012] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f 
tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 696.587268] env[61974]: DEBUG nova.scheduler.client.report [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.608816] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.609121] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.609378] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.609568] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.609970] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.610247] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.610669] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.610911] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.611286] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.611396] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.611621] env[61974]: DEBUG nova.virt.hardware [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.612538] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599d0156-4009-4ea2-a466-11b933c55ca7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.625021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed5fdbc-3425-4c58-a122-89e6bec68f1a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.095144] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.095578] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 697.099866] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.495s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.101303] env[61974]: INFO nova.compute.claims [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.147799] env[61974]: DEBUG nova.compute.manager [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Received event network-changed-51a2c2ab-d715-49cf-a9e0-431b116e7551 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 697.148073] env[61974]: DEBUG nova.compute.manager [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Refreshing instance network info cache due to event network-changed-51a2c2ab-d715-49cf-a9e0-431b116e7551. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 697.148381] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] Acquiring lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.148625] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] Acquired lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.148739] env[61974]: DEBUG nova.network.neutron [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Refreshing network info cache for port 51a2c2ab-d715-49cf-a9e0-431b116e7551 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.308551] env[61974]: ERROR nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. 
[ 697.308551] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.308551] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.308551] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.308551] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.308551] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.308551] env[61974]: ERROR nova.compute.manager raise self.value [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.308551] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 697.308551] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.308551] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 697.309113] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.309113] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 697.309113] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. 
[ 697.309113] env[61974]: ERROR nova.compute.manager [ 697.309113] env[61974]: Traceback (most recent call last): [ 697.309113] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 697.309113] env[61974]: listener.cb(fileno) [ 697.309113] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.309113] env[61974]: result = function(*args, **kwargs) [ 697.309113] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.309113] env[61974]: return func(*args, **kwargs) [ 697.309113] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.309113] env[61974]: raise e [ 697.309113] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.309113] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 697.309113] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.309113] env[61974]: created_port_ids = self._update_ports_for_instance( [ 697.309113] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.309113] env[61974]: with excutils.save_and_reraise_exception(): [ 697.309113] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.309113] env[61974]: self.force_reraise() [ 697.309113] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.309113] env[61974]: raise self.value [ 697.309113] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.309113] env[61974]: updated_port = self._update_port( [ 697.309113] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.309113] env[61974]: _ensure_no_port_binding_failure(port) [ 697.309113] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.309113] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 697.310038] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. [ 697.310038] env[61974]: Removing descriptor: 21 [ 697.310038] env[61974]: ERROR nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. 
[ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Traceback (most recent call last): [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] yield resources [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.driver.spawn(context, instance, image_meta, [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.310038] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] vm_ref = self.build_virtual_machine(instance, [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] for vif in network_info: [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self._sync_wrapper(fn, *args, **kwargs) [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.wait() [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self[:] = self._gt.wait() [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self._exit_event.wait() [ 697.310437] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.310746] env[61974]: ERROR 
nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] result = hub.switch() [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self.greenlet.switch() [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] result = function(*args, **kwargs) [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return func(*args, **kwargs) [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise e [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] nwinfo = self.network_api.allocate_for_instance( [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.310746] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] created_port_ids = self._update_ports_for_instance( [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] with excutils.save_and_reraise_exception(): [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.force_reraise() [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise self.value [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] updated_port = self._update_port( [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.311047] 
env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] _ensure_no_port_binding_failure(port) [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.311047] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise exception.PortBindingFailed(port_id=port['id']) [ 697.311493] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. [ 697.311493] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] [ 697.311493] env[61974]: INFO nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Terminating instance [ 697.312595] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.612799] env[61974]: DEBUG nova.compute.utils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 697.614417] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 697.675173] env[61974]: DEBUG nova.network.neutron [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.758390] env[61974]: DEBUG nova.network.neutron [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.115284] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 698.261711] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd607169-12d7-4888-b920-09b79dcbdc15 req-a47e88c5-b9da-4c1f-b866-f48f3969350d service nova] Releasing lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.262200] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquired lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.262464] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.529776] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02287b6e-f5c3-4656-91ce-112117cd6260 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.538588] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f773118-99a1-496e-b129-96654f2fcc6f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.574988] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67769d8d-87f1-4ea2-bf87-476b86ef4c86 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.584277] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eaa962-9b5e-4b97-9ced-2459f5f2bd55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.599360] env[61974]: DEBUG nova.compute.provider_tree [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.791234] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.878155] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.102920] env[61974]: DEBUG nova.scheduler.client.report [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.115167] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "2601b97a-8ef6-4b61-b0e0-dd6c7c203206" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.115420] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "2601b97a-8ef6-4b61-b0e0-dd6c7c203206" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.127537] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 699.152430] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 699.153555] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.153959] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 699.153959] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.154084] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 699.154224] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 699.154435] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 699.154602] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 699.154773] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 
tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 699.154928] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 699.155117] env[61974]: DEBUG nova.virt.hardware [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 699.156556] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28655947-9e0d-48d6-a22c-b3d913538913 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.165416] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83916f0b-fa65-4117-94d8-0fa35cc9e7a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.181648] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.187272] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating folder: Project (61df9e215227463f8b0a949963e6143c). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.188528] env[61974]: DEBUG nova.compute.manager [req-316c8c89-c3ab-4d1a-9ff9-45829a120c1d req-c665c5e7-22a4-4ae4-b9dc-35e27ca8a9f4 service nova] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Received event network-vif-deleted-51a2c2ab-d715-49cf-a9e0-431b116e7551 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 699.189103] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9de91f8e-3cf1-47b6-a8fb-3cc064c64ddf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.200393] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Created folder: Project (61df9e215227463f8b0a949963e6143c) in parent group-v292912. [ 699.200583] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating folder: Instances. Parent ref: group-v292923. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.200815] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03c5d40f-f8f9-4434-815e-972a8b3b96ea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.210739] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Created folder: Instances in parent group-v292923. [ 699.210982] env[61974]: DEBUG oslo.service.loopingcall [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.211187] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 699.211386] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f55da807-0985-41ef-98a7-4f77c46ca7ae {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.228145] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 699.228145] env[61974]: value = "task-1378914" [ 699.228145] env[61974]: _type = "Task" [ 699.228145] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.235745] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378914, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.381063] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Releasing lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.382673] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.382673] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.382673] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b644f6c-8b3b-4bf7-8b08-40be9eea76f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.392038] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac91ae26-c1ea-4014-bc07-088209063bac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.416629] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26cb158a-04fa-4031-b099-34dfe8a762cc could not be found. [ 699.416943] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.417235] env[61974]: INFO nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 699.417560] env[61974]: DEBUG oslo.service.loopingcall [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.417845] env[61974]: DEBUG nova.compute.manager [-] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.417973] env[61974]: DEBUG nova.network.neutron [-] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.435199] env[61974]: DEBUG nova.network.neutron [-] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 699.612775] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.613325] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 699.615921] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.816s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.740414] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378914, 'name': CreateVM_Task, 'duration_secs': 0.312095} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.740589] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.741457] env[61974]: DEBUG oslo_vmware.service [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dab6eb-f684-490e-b58f-52f2c3d55318 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.748023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.748208] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.748569] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.748821] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4272cab1-25b2-4f7e-8df5-449182f35b1d {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.753632] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 699.753632] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5280d4d0-8ad0-2def-370c-70c12cbcb3b1" [ 699.753632] env[61974]: _type = "Task" [ 699.753632] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.762422] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5280d4d0-8ad0-2def-370c-70c12cbcb3b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.940593] env[61974]: DEBUG nova.network.neutron [-] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.119564] env[61974]: DEBUG nova.compute.utils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 700.124268] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 700.124479] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.179208] env[61974]: DEBUG nova.policy [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e53bba18a479426b8f517a62d7e2cfde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebf9a1df54af49f99767ece26bb1a731', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 700.269026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.269026] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.269026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.269026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.269697] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.269697] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8589f4b-fe10-4c59-bacf-affcbf0f51eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.281173] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.281871] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.282870] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85794268-4ae7-4b42-938c-cecba8e06615 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.292984] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbceee64-1eff-4564-bd25-68d8dd79322c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.301710] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 700.301710] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f4a621-c1f5-6210-d07c-6f04b4c1b023" [ 700.301710] env[61974]: _type = "Task" [ 700.301710] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.313195] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f4a621-c1f5-6210-d07c-6f04b4c1b023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.446136] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Successfully created port: 66718558-8503-4606-adc8-14e9b6d34c45 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.446136] env[61974]: INFO nova.compute.manager [-] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Took 1.03 seconds to deallocate network for instance. 
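Note on the repeated PortBindingFailed errors in this log: every traceback ends in _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']) and leads to the "Terminating instance" / "Deallocating network" entries seen here. The snippet below is a minimal illustrative sketch of that check, not the nova source: the 'binding:vif_type' == 'binding_failed' condition is an assumption based on the Neutron port-binding convention, while the exception name and message mirror the ERROR lines in this log.

    # Illustrative sketch only (not nova code): approximates the check the
    # tracebacks attribute to nova/network/neutron.py::_ensure_no_port_binding_failure.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding by setting the port's
        # vif_type to the sentinel value 'binding_failed'.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # Example: a port Neutron failed to bind on this compute host.
    port = {"id": "51a2c2ab-d715-49cf-a9e0-431b116e7551",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR lines above
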
[ 700.448276] env[61974]: DEBUG nova.compute.claims [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 700.448684] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.625470] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 700.667017] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667200] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 9d599717-0bda-4996-89d8-c41ce089eaac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667338] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 8a00b16d-8274-4728-920b-a30e95fa4048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667458] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 26cb158a-04fa-4031-b099-34dfe8a762cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667573] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 6f9690ab-8218-4b2c-ba36-682ea7398209 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667730] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 93a736b5-5423-4378-8b0c-73a0c46414ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.667891] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 67ca9fb2-9ca0-4fca-956e-961d5011df35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 700.818059] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Preparing fetch location {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 700.818355] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating directory with path [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.818786] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da202279-dbd4-4264-a3d9-15674c433daa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.845041] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Created directory with path [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.845041] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Fetch image to [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 700.845213] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Downloading image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk on the data store datastore2 {{(pid=61974) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 700.846606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f161dc-c667-4221-bdff-43dd332e7ab5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.858860] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b57694-f7e7-4a5c-8417-0cdb81d54fa5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.871891] env[61974]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3ee794-8cf8-4474-aa75-8d670c7fa18e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.914650] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a295ccbe-c785-45fa-acf8-3d9f94b74f85 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.922116] env[61974]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae07d6bf-ffc7-4632-bdb5-8d3087cecf63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.961957] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Downloading image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to the data store datastore2 {{(pid=61974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 701.030176] env[61974]: DEBUG oslo_vmware.rw_handles [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 701.172953] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance ccc4d6d9-979a-468a-9b7a-4633662c4052 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.436649] env[61974]: DEBUG nova.compute.manager [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Received event network-changed-66718558-8503-4606-adc8-14e9b6d34c45 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 701.436989] env[61974]: DEBUG nova.compute.manager [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Refreshing instance network info cache due to event network-changed-66718558-8503-4606-adc8-14e9b6d34c45. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 701.437286] env[61974]: DEBUG oslo_concurrency.lockutils [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] Acquiring lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.437479] env[61974]: DEBUG oslo_concurrency.lockutils [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] Acquired lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.437730] env[61974]: DEBUG nova.network.neutron [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Refreshing network info cache for port 66718558-8503-4606-adc8-14e9b6d34c45 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.634334] env[61974]: ERROR nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. [ 701.634334] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.634334] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.634334] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.634334] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.634334] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.634334] env[61974]: ERROR nova.compute.manager raise self.value [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.634334] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 701.634334] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.634334] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 701.634860] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.634860] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 701.634860] env[61974]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. [ 701.634860] env[61974]: ERROR nova.compute.manager [ 701.634860] env[61974]: Traceback (most recent call last): [ 701.634860] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 701.634860] env[61974]: listener.cb(fileno) [ 701.634860] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.634860] env[61974]: result = function(*args, **kwargs) [ 701.634860] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.634860] env[61974]: return func(*args, **kwargs) [ 701.634860] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.634860] env[61974]: raise e [ 701.634860] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.634860] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 701.634860] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.634860] env[61974]: created_port_ids = self._update_ports_for_instance( [ 701.634860] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.634860] env[61974]: with excutils.save_and_reraise_exception(): [ 701.634860] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.634860] env[61974]: self.force_reraise() [ 701.634860] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.634860] env[61974]: raise self.value [ 701.634860] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.634860] env[61974]: updated_port = self._update_port( [ 701.634860] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.634860] env[61974]: _ensure_no_port_binding_failure(port) [ 701.634860] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.634860] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 701.635703] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. [ 701.635703] env[61974]: Removing descriptor: 21 [ 701.637229] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 701.659421] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 701.659719] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.659940] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.660191] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.660351] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.660518] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 701.660749] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 701.660967] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 701.661195] 
env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 701.661370] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 701.661591] env[61974]: DEBUG nova.virt.hardware [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 701.663116] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40beaa0-2a3c-43bf-a793-b76bed15b81d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.675048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a864997-50b9-475a-ac97-68d95686ac96 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.680379] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance c3eb4869-0bde-4398-bf34-3ee6073174e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 701.692780] env[61974]: ERROR nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. 
[ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Traceback (most recent call last): [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] yield resources [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.driver.spawn(context, instance, image_meta, [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] vm_ref = self.build_virtual_machine(instance, [ 701.692780] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] vif_infos = vmwarevif.get_vif_info(self._session, [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] for vif in network_info: [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return self._sync_wrapper(fn, *args, **kwargs) [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.wait() [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self[:] = self._gt.wait() [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return self._exit_event.wait() [ 701.693403] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 701.693403] env[61974]: ERROR 
nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] current.throw(*self._exc) [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] result = function(*args, **kwargs) [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return func(*args, **kwargs) [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise e [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] nwinfo = self.network_api.allocate_for_instance( [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] created_port_ids = self._update_ports_for_instance( [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] with excutils.save_and_reraise_exception(): [ 701.693797] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.force_reraise() [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise self.value [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] updated_port = self._update_port( [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] _ensure_no_port_binding_failure(port) [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise exception.PortBindingFailed(port_id=port['id']) [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. [ 701.694171] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] [ 701.694171] env[61974]: INFO nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Terminating instance [ 701.698241] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.706908] env[61974]: DEBUG oslo_vmware.rw_handles [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Completed reading data from the image iterator. {{(pid=61974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 701.707118] env[61974]: DEBUG oslo_vmware.rw_handles [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 701.834095] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Downloaded image file data 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk on the data store datastore2 {{(pid=61974) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 701.835789] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Caching image {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 701.836351] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copying Virtual Disk [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk to [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.836455] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df967a53-f523-4c89-8556-b7602ac0bf23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.846576] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 701.846576] env[61974]: value = "task-1378916" [ 701.846576] env[61974]: _type = "Task" [ 701.846576] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.854926] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.956118] env[61974]: DEBUG nova.network.neutron [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.014200] env[61974]: DEBUG nova.network.neutron [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.183433] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 702.357126] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378916, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.517552] env[61974]: DEBUG oslo_concurrency.lockutils [req-b24968cb-ad31-438b-abc9-ba0b560aa54a req-7c0b8569-5c52-4f62-a576-b3898ef7df72 service nova] Releasing lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.518015] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquired lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.518213] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.686886] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 9f781418-6149-4c73-aaa0-20c8cbc8c482 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 702.859564] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645385} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.859919] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copied Virtual Disk [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk to [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.860169] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleting the datastore file [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/tmp-sparse.vmdk {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 702.861166] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-696aa093-530a-4caa-af12-bc96f278b531 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.870065] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 702.870065] env[61974]: value = "task-1378918" [ 702.870065] env[61974]: _type = "Task" [ 702.870065] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.880500] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.037777] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.090964] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.189940] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.381058] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027302} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.381058] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.381196] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Moving file from [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 to [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8. {{(pid=61974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 703.381392] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f9b3ee3a-29e6-47ba-9088-c0029687f56b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.388517] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 703.388517] env[61974]: value = "task-1378919" [ 703.388517] env[61974]: _type = "Task" [ 703.388517] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.396547] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378919, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.460741] env[61974]: DEBUG nova.compute.manager [req-ef5a2cbe-d811-41c0-97b0-517c798ec671 req-ce661203-d46e-4713-8240-d97716eae568 service nova] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Received event network-vif-deleted-66718558-8503-4606-adc8-14e9b6d34c45 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 703.593414] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Releasing lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.593852] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 703.594059] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.594365] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f2ee51e-9028-4210-97db-ace6fc534419 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.604014] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c89ea3-ca60-4523-8536-bdfc2f6eb2e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.628324] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67ca9fb2-9ca0-4fca-956e-961d5011df35 could not be found. [ 703.628559] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.628744] env[61974]: INFO nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Took 0.03 seconds to destroy the instance on the hypervisor. [ 703.628986] env[61974]: DEBUG oslo.service.loopingcall [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.629216] env[61974]: DEBUG nova.compute.manager [-] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.629312] env[61974]: DEBUG nova.network.neutron [-] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 703.644714] env[61974]: DEBUG nova.network.neutron [-] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.694746] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 99a03824-dd33-4916-84f7-4c911a98c9d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 703.900716] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378919, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026494} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.901021] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] File moved {{(pid=61974) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 703.901224] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Cleaning up location [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 703.901384] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleting the datastore file [datastore2] vmware_temp/a5f4eaef-bc00-482e-99b2-6ed36facd00c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.901629] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54ee7131-09a7-4888-a447-f0391315baec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.908907] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 703.908907] env[61974]: value = "task-1378920" [ 703.908907] env[61974]: _type = "Task" [ 703.908907] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.916928] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378920, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.147046] env[61974]: DEBUG nova.network.neutron [-] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.197529] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 704.421207] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378920, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027985} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.421207] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.421207] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca1d70cb-86ac-4747-9388-269f7e65a9ef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.426700] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 704.426700] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e07fe8-9dc9-032b-86c4-af138d8d7d0b" [ 704.426700] env[61974]: _type = "Task" [ 704.426700] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.435069] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e07fe8-9dc9-032b-86c4-af138d8d7d0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.651716] env[61974]: INFO nova.compute.manager [-] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Took 1.02 seconds to deallocate network for instance. [ 704.652929] env[61974]: DEBUG nova.compute.claims [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 704.652929] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.701066] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a333f129-6a86-4715-83e2-79543620d013 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 704.937499] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e07fe8-9dc9-032b-86c4-af138d8d7d0b, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.937808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.938056] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.938320] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc1159ba-b5fb-42a7-821f-c7531e5a8d8a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.945617] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 704.945617] env[61974]: value = "task-1378922" [ 704.945617] env[61974]: _type = "Task" [ 704.945617] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.954407] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.205050] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 7a465c7e-874d-4cd1-9c23-0ae249997114 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 705.456608] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460196} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.456923] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.457703] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.457703] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71c335ce-f805-4bac-8979-4c8a8ca52410 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.464279] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 705.464279] env[61974]: value = "task-1378923" [ 705.464279] env[61974]: _type = "Task" [ 705.464279] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.473614] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.708703] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2e217cbc-4962-44c7-b054-b3ae135ef8bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 705.974197] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059835} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.974452] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.975269] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988c6979-017c-474c-98c8-80c1376dad52 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.995884] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.996178] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81a0e8b-f283-4128-abbc-154c25736246 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.015980] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 706.015980] env[61974]: value = "task-1378924" [ 706.015980] env[61974]: _type = "Task" [ 706.015980] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.025413] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378924, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.212627] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance d334c2d8-15d8-4f70-9a85-312687d1b337 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 706.526538] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378924, 'name': ReconfigVM_Task, 'duration_secs': 0.28686} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.526974] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 706.527661] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be8b743f-1ea2-4591-a6b9-dbe3f1c7cf17 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.534929] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 706.534929] env[61974]: value = "task-1378925" [ 706.534929] env[61974]: _type = "Task" [ 706.534929] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.543551] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378925, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.714993] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a1c488d6-4eb4-4362-84cd-68151a47d3bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.047978] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378925, 'name': Rename_Task, 'duration_secs': 0.131462} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.048391] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.048654] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d15691c-0268-46cf-950b-e96a1729f88f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.055960] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 707.055960] env[61974]: value = "task-1378927" [ 707.055960] env[61974]: _type = "Task" [ 707.055960] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.064133] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.220703] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.568233] env[61974]: DEBUG oslo_vmware.api [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378927, 'name': PowerOnVM_Task, 'duration_secs': 0.473363} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.568559] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.568805] env[61974]: INFO nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Took 8.44 seconds to spawn the instance on the hypervisor. [ 707.569007] env[61974]: DEBUG nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 707.569904] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278f23e4-520b-4db5-9bfa-6a258efd0746 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.723903] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 30455d07-4826-4561-a04f-1b4a2041402c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.087712] env[61974]: INFO nova.compute.manager [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Took 32.10 seconds to build instance. 
[ 708.227708] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f6b76518-d691-4e4f-861a-624a1684e564 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.590307] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7ec0013-1717-44ef-8786-09ac26d24158 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.038s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.651570] env[61974]: INFO nova.compute.manager [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Rebuilding instance [ 708.702580] env[61974]: DEBUG nova.compute.manager [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 708.703476] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd1ebfc-5e01-4877-954f-4c1647ffbcda {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.730542] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59b1ad04-c949-4b07-af77-f84f842dd9ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 709.094245] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 709.219580] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 709.219906] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ca98a21-fab1-42d1-b04b-653389efa6b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.228083] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 709.228083] env[61974]: value = "task-1378928" [ 709.228083] env[61974]: _type = "Task" [ 709.228083] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.232033] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2b74ee60-ce70-429a-9ccb-1f96c236cf8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 709.239975] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.616975] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.735413] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b935b7e2-ba4b-452a-9eca-2fad5acc9055 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 709.743021] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378928, 'name': PowerOffVM_Task, 'duration_secs': 0.133225} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.743021] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 709.743021] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 709.743021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9be952-c6a7-4228-8b9a-7f65363caa7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.750914] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 709.751211] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b270e49-fe73-433c-91fa-9f823d59da1d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.779593] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 709.779821] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 709.779997] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleting the datastore file [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 709.780270] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91dac8dd-eec4-4e5c-8ddf-80fae659843e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.790474] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 709.790474] env[61974]: value = "task-1378930" [ 709.790474] env[61974]: _type = "Task" [ 709.790474] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.798844] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.243090] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4628f895-1ae5-4d25-8095-f892b86769f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 710.304772] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146934} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.305389] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.305821] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.306280] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.745806] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 90fd5720-923c-4243-9f62-908e35fe35a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 711.250016] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e6bc38d5-056f-40c2-a2ed-467200da2738 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 711.338870] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=<?>,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T20:52:00Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 711.339024] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.339194] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.339394] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.339540] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.339686] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 711.339889] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 711.340056] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 711.340222] env[61974]: DEBUG nova.virt.hardware [None 
req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 711.340379] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 711.340546] env[61974]: DEBUG nova.virt.hardware [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 711.341403] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af3c982-7ad5-4b38-aee3-4a4f83816745 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.350845] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea08d7b8-336c-4d01-9b84-8bf9e293d76b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.364302] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.370038] env[61974]: DEBUG oslo.service.loopingcall [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.370275] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 711.370480] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0af97229-5fa3-4f78-9f1a-a43521dd2bf4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.387453] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.387453] env[61974]: value = "task-1378932" [ 711.387453] env[61974]: _type = "Task" [ 711.387453] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.403394] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378932, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.753514] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2601b97a-8ef6-4b61-b0e0-dd6c7c203206 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 711.753834] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 711.753994] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 711.897852] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378932, 'name': CreateVM_Task, 'duration_secs': 0.284511} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.900015] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.900612] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.900791] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.901089] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 711.901339] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81c33005-3e19-4af1-b9b1-3468cfb4f420 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.906080] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 711.906080] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e8eefc-62da-bbde-c491-589a07fb580c" [ 711.906080] env[61974]: _type = "Task" [ 711.906080] env[61974]: } to complete. 
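
The CreateVM_Task that completes above and the SearchDatastore_Task now being waited on both follow the oslo.vmware task idiom that produces these "Waiting for the task" / "progress is N%" lines: a vSphere *_Task method is invoked through the session, and wait_for_task() polls its TaskInfo until it reaches a terminal state. A minimal sketch of that idiom, using placeholder credentials and a placeholder managed object reference rather than anything taken from this log:

    # Sketch only: invoke a vSphere *_Task method via oslo.vmware and wait on it,
    # mirroring the CreateVM_Task / SearchDatastore_Task polling in the log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',     # placeholder credentials
        api_retry_count=10, task_poll_interval=0.5)

    # Any call ending in _Task returns a Task moref; e.g. powering off a VM.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls TaskInfo (the "_poll_task ... progress is N%" lines)
    # and returns it once the task succeeds, or raises on error.
    task_info = session.wait_for_task(task)
    print(task_info.state)
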
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.914852] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e8eefc-62da-bbde-c491-589a07fb580c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.081834] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedf3907-5ac9-456e-804f-febe6997174a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.090435] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0032af-86df-4dc9-bc49-eb00d27b70b8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.121047] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c833d751-cb32-45ec-aa68-635ceffc8118 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.128653] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bf74bf-1a8f-4980-a00c-a9085647e51e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.142852] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.418292] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e8eefc-62da-bbde-c491-589a07fb580c, 'name': SearchDatastore_Task, 'duration_secs': 0.009631} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.418593] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.418836] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.419109] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.419343] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.419546] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.420142] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3db8231-8552-4c0f-b139-52ee64f49b29 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.431023] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.431023] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Folder [datastore1] devstack-image-cache_base created. 
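
The Acquiring/Acquired/Releasing lock lines around [datastore1] devstack-image-cache_base show how access to a per-image cache entry is serialized while the driver checks for the cached VMDK and creates the directory if it is missing. A small standalone sketch of that interprocess-lock idiom with oslo.concurrency (the lock name, prefix and lock_path below are illustrative, not Nova's actual values):

    # Sketch of the external-lock pattern behind the lockutils entries above:
    # only one worker at a time probes or populates a given image-cache entry.
    from oslo_concurrency import lockutils

    image_id = '2c021a64-f3a3-4b0a-8c90-b07440a3f3d8'   # image id seen in the log

    # external=True backs the lock with a lock file, so separate processes on
    # the same host are serialized too, not just threads in one service.
    with lockutils.lock(image_id, lock_file_prefix='image-cache',
                        external=True, lock_path='/tmp'):
        # ... search the datastore for the cached VMDK, copy it in if absent ...
        pass
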
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 712.431023] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad810718-863e-4d82-ab71-e832fc3bb91a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.436047] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 712.436047] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523249dc-8e7d-6014-f543-9f424c9a3c01" [ 712.436047] env[61974]: _type = "Task" [ 712.436047] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.443592] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523249dc-8e7d-6014-f543-9f424c9a3c01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.645858] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.946494] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523249dc-8e7d-6014-f543-9f424c9a3c01, 'name': SearchDatastore_Task, 'duration_secs': 0.009538} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.947322] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5b19b8a-2758-4644-b5f1-560329fa9e53 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.952736] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 712.952736] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52dee211-9d1a-6b66-3513-80e1a66e1f01" [ 712.952736] env[61974]: _type = "Task" [ 712.952736] env[61974]: } to complete. 
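
The inventory payload reported to Placement above fixes what the scheduler may consume on this node: capacity per resource class works out to (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A standalone check against the exact values in this log (plain Python, no OpenStack imports):

    # Effective schedulable capacity for the inventory reported in the log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 178},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:.0f} schedulable, at most {inv['max_unit']} per allocation")

    # VCPU: 48 * 4.0 = 192 schedulable, which is why the 7 vCPUs already allocated
    # in the "Final resource view" above leave plenty of headroom.
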
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.960467] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dee211-9d1a-6b66-3513-80e1a66e1f01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.151638] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 713.151887] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.536s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.152182] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.127s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.464655] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dee211-9d1a-6b66-3513-80e1a66e1f01, 'name': SearchDatastore_Task, 'duration_secs': 0.015449} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.465122] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.465425] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 713.465717] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd26d1a1-1698-47c1-af18-103f8b282f16 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.473500] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 713.473500] env[61974]: value = "task-1378934" [ 713.473500] env[61974]: _type = "Task" [ 713.473500] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.481692] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.983975] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378934, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.062141] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdf21ac-24d1-46ee-85ea-cb057910a5bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.070581] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed13087b-e8fa-4308-8b3c-cfcd4efa2f66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.101986] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1598d3e-b23a-417f-a488-5e1e65ce401d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.109764] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f045ab5-4882-4ce5-95b0-7e076549c41b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.123345] env[61974]: DEBUG nova.compute.provider_tree [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.485394] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608458} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.485667] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 714.485890] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.486147] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26e67e03-31f9-49e8-872b-f5902704b199 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.494448] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 714.494448] env[61974]: value = "task-1378935" [ 714.494448] env[61974]: _type = "Task" [ 714.494448] env[61974]: } to complete. 
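
The spawn path above runs copy, then extend: the cached sparse image is copied into the instance directory and then grown to the flavor's root disk size before being attached by the ReconfigVM_Task that follows. The "Extending root virtual disk to 1048576" figure is simply the flavor's root_gb expressed in the KB unit the extend task works in; a quick standalone check using the m1.nano flavor from this log:

    # Flavor root disk size -> the KB value passed to ExtendVirtualDisk_Task.
    root_gb = 1                           # m1.nano root_gb, per the flavor in the log
    requested_size_kb = root_gb * 1024 * 1024
    assert requested_size_kb == 1048576   # matches "Extending root virtual disk to 1048576"
    print(requested_size_kb)
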
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.502413] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378935, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.626654] env[61974]: DEBUG nova.scheduler.client.report [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.004338] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075174} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.004553] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.005342] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c743b9f-83ea-456f-81f0-f3e56695278a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.025318] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.025614] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c9a9a37-d80a-4980-a784-7c08112b5dfe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.046258] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 715.046258] env[61974]: value = "task-1378936" [ 715.046258] env[61974]: _type = "Task" [ 715.046258] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.054376] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.131562] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.132202] env[61974]: ERROR nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Traceback (most recent call last): [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.driver.spawn(context, instance, image_meta, [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] vm_ref = self.build_virtual_machine(instance, [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.132202] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] for vif in network_info: [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self._sync_wrapper(fn, *args, **kwargs) [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.wait() [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self[:] = self._gt.wait() [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self._exit_event.wait() [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] result = hub.switch() [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.132542] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return self.greenlet.switch() [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] result = function(*args, **kwargs) [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] return func(*args, **kwargs) [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise e [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] nwinfo = self.network_api.allocate_for_instance( [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] created_port_ids = self._update_ports_for_instance( [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] with excutils.save_and_reraise_exception(): [ 715.132911] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] self.force_reraise() [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise self.value [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] updated_port = self._update_port( [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] _ensure_no_port_binding_failure(port) [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] raise exception.PortBindingFailed(port_id=port['id']) [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] nova.exception.PortBindingFailed: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. [ 715.133278] env[61974]: ERROR nova.compute.manager [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] [ 715.133586] env[61974]: DEBUG nova.compute.utils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 715.134605] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.977s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.135657] env[61974]: INFO nova.compute.claims [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.138461] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Build of instance 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0 was re-scheduled: Binding failed for port ab348f2c-e70a-4e4e-a612-4632d1f93c33, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 715.139118] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 715.139118] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquiring lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.139262] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Acquired lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.140029] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.557059] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378936, 'name': ReconfigVM_Task, 'duration_secs': 0.488198} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.558039] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.558615] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-400fdc5c-0af0-4335-ba96-d0ffff00c04b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.565937] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 715.565937] env[61974]: value = "task-1378938" [ 715.565937] env[61974]: _type = "Task" [ 715.565937] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.574389] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378938, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.766349] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.871072] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.076013] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378938, 'name': Rename_Task, 'duration_secs': 0.14805} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.076308] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 716.076563] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e42b95e-65cb-453c-bbec-1777931a742d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.083810] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 716.083810] env[61974]: value = "task-1378939" [ 716.083810] env[61974]: _type = "Task" [ 716.083810] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.092052] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.373429] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Releasing lock "refresh_cache-18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.373670] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 716.373851] env[61974]: DEBUG nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 716.374025] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.390187] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.493646] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4de789a-ee31-4825-ac0b-11b7227b3a4d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.501674] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a20447-143d-49b9-a9d9-2ea655675002 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.532962] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fecffb-1cc2-49ce-b2da-2a7279a6c13c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.541154] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b265c161-6f61-46f1-a591-0d31725c817b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.554994] env[61974]: DEBUG nova.compute.provider_tree [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.597119] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378939, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.894450] env[61974]: DEBUG nova.network.neutron [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.059023] env[61974]: DEBUG nova.scheduler.client.report [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.095534] env[61974]: DEBUG oslo_vmware.api [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378939, 'name': PowerOnVM_Task, 'duration_secs': 0.529914} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.095798] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 717.096010] env[61974]: DEBUG nova.compute.manager [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 717.096799] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112a2ae4-9ee7-4e72-a6fd-a55d70e72cbd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.398995] env[61974]: INFO nova.compute.manager [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] [instance: 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0] Took 1.02 seconds to deallocate network for instance. 
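
Once PowerOnVM_Task finishes, the "Checking state" step re-reads the VM's power state so the compute manager can record it; in the VMware driver that amounts to a property read of runtime.powerState through the session (the PropertyCollector.RetrievePropertiesEx calls above). A hedged sketch of that read with oslo.vmware, reusing the placeholder session and moref conventions from the earlier task sketch:

    # Sketch: read a VM's power state the way the "Checking state" step does.
    # Assumes `session` is an oslo.vmware VMwareAPISession (see earlier sketch).
    from oslo_vmware import vim_util

    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # placeholder moref
    power_state = session.invoke_api(
        vim_util, 'get_object_property', session.vim, vm_ref, 'runtime.powerState')
    print(power_state)   # expected 'poweredOn' once task-1378939 has completed
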
[ 717.563297] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.564100] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 717.568072] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.313s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.613157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.075193] env[61974]: DEBUG nova.compute.utils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.079495] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Allocating IP information in the background. 
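
"Start building networks asynchronously" / "Allocating IP information in the background" describes the pattern that also explains the eventlet frames in the tracebacks above: port allocation is spawned on a greenthread while the driver keeps preparing block devices, and the result is only waited on (the self._gt.wait() frame) when the VIFs are first needed. A rough standalone illustration of that shape with eventlet; the function body is a placeholder, not Nova's allocation code:

    # Rough illustration of background network allocation with eventlet.
    import eventlet

    def allocate_networks():
        eventlet.sleep(0.1)          # stands in for the Neutron port create/update calls
        return ['port-43527828-744c-447a-a35e-b7a23fac5978']

    gt = eventlet.spawn(allocate_networks)   # "Allocating IP information in the background."
    # ... block device mappings are built while the greenthread runs ...
    network_info = gt.wait()                 # the self._gt.wait() frame in the tracebacks
    print(network_info)
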
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 718.079687] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.140106] env[61974]: DEBUG nova.policy [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7983fdd8bf664776a5cded0474f1f780', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9e31c7439644f9591469e6795390465', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 718.442070] env[61974]: INFO nova.scheduler.client.report [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Deleted allocations for instance 18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0 [ 718.456322] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquiring lock "242d6159-5223-4815-900c-4c1285c7a90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.456585] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "242d6159-5223-4815-900c-4c1285c7a90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.476522] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Successfully created port: 43527828-744c-447a-a35e-b7a23fac5978 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.493954] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b67259-9b1e-43dd-87fb-fac27f5d499a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.502504] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e424dea7-4247-471e-a9a0-1d967752a9ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.536132] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d6c419-3358-4657-b431-34e7c76e57fe {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.544545] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798d0506-4142-4d55-8ff1-3a55e17ee9c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.559107] env[61974]: DEBUG nova.compute.provider_tree [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.581031] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 718.766031] env[61974]: INFO nova.compute.manager [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Rebuilding instance [ 718.819498] env[61974]: DEBUG nova.compute.manager [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 718.820097] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131f03cb-11d3-4bd4-8bb3-20b7c998904c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.956829] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6f3f557-4896-4c08-8f02-cbacacfab261 tempest-ImagesOneServerTestJSON-124460901 tempest-ImagesOneServerTestJSON-124460901-project-member] Lock "18839a5e-fbe2-4f74-b4bb-432b0b2ae3f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.959s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.062761] env[61974]: DEBUG nova.scheduler.client.report [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.333432] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering off the VM {{(pid=61974) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 719.333751] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e60f1e8-7682-4d78-b45f-f2a1ece434a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.342888] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 719.342888] env[61974]: value = "task-1378940" [ 719.342888] env[61974]: _type = "Task" [ 719.342888] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.352693] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.367945] env[61974]: DEBUG nova.compute.manager [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Received event network-changed-43527828-744c-447a-a35e-b7a23fac5978 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.368512] env[61974]: DEBUG nova.compute.manager [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Refreshing instance network info cache due to event network-changed-43527828-744c-447a-a35e-b7a23fac5978. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 719.368594] env[61974]: DEBUG oslo_concurrency.lockutils [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] Acquiring lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.368748] env[61974]: DEBUG oslo_concurrency.lockutils [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] Acquired lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.369223] env[61974]: DEBUG nova.network.neutron [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Refreshing network info cache for port 43527828-744c-447a-a35e-b7a23fac5978 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.461731] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 719.569379] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.570156] env[61974]: ERROR nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Traceback (most recent call last): [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.driver.spawn(context, instance, image_meta, [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] vm_ref = self.build_virtual_machine(instance, [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.570156] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] for vif in network_info: [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self._sync_wrapper(fn, *args, **kwargs) [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.wait() [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.570583] env[61974]: ERROR 
nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self[:] = self._gt.wait() [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self._exit_event.wait() [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] result = hub.switch() [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 719.570583] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return self.greenlet.switch() [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] result = function(*args, **kwargs) [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] return func(*args, **kwargs) [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise e [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] nwinfo = self.network_api.allocate_for_instance( [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] created_port_ids = self._update_ports_for_instance( [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] with excutils.save_and_reraise_exception(): [ 719.571027] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] self.force_reraise() [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise self.value [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] updated_port = self._update_port( [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] _ensure_no_port_binding_failure(port) [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] raise exception.PortBindingFailed(port_id=port['id']) [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] nova.exception.PortBindingFailed: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. [ 719.571435] env[61974]: ERROR nova.compute.manager [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] [ 719.571775] env[61974]: DEBUG nova.compute.utils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 719.572693] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.920s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.574098] env[61974]: INFO nova.compute.claims [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.578483] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Build of instance 9d599717-0bda-4996-89d8-c41ce089eaac was re-scheduled: Binding failed for port 5db9f7dd-b982-484c-9b67-73d7df3b7601, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 719.579428] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 719.579509] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.580424] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquired lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.580424] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.594857] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 719.618132] env[61974]: ERROR nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. 
[ 719.618132] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.618132] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.618132] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.618132] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.618132] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.618132] env[61974]: ERROR nova.compute.manager raise self.value [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.618132] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 719.618132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.618132] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 719.618676] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.618676] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 719.618676] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. 
[ 719.618676] env[61974]: ERROR nova.compute.manager [ 719.618676] env[61974]: Traceback (most recent call last): [ 719.618676] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 719.618676] env[61974]: listener.cb(fileno) [ 719.618676] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.618676] env[61974]: result = function(*args, **kwargs) [ 719.618676] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.618676] env[61974]: return func(*args, **kwargs) [ 719.618676] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.618676] env[61974]: raise e [ 719.618676] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.618676] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 719.618676] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.618676] env[61974]: created_port_ids = self._update_ports_for_instance( [ 719.618676] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.618676] env[61974]: with excutils.save_and_reraise_exception(): [ 719.618676] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.618676] env[61974]: self.force_reraise() [ 719.618676] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.618676] env[61974]: raise self.value [ 719.618676] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.618676] env[61974]: updated_port = self._update_port( [ 719.618676] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.618676] env[61974]: _ensure_no_port_binding_failure(port) [ 719.618676] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.618676] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 719.619486] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. 
[ 719.619486] env[61974]: Removing descriptor: 21 [ 719.630152] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:55:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='165f5050-61ac-4ee2-935a-b8729b59896a',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1990101109',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.630152] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.630152] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.631301] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.631301] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.631510] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.631732] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.631877] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.632104] env[61974]: DEBUG nova.virt.hardware [None 
req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.632240] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.632416] env[61974]: DEBUG nova.virt.hardware [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.633765] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7886f036-7231-4e83-873c-ce576255f54c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.644895] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61e8c83-fdea-43db-b357-19af840bcb8d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.660980] env[61974]: ERROR nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. 
[ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Traceback (most recent call last): [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] yield resources [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.driver.spawn(context, instance, image_meta, [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] vm_ref = self.build_virtual_machine(instance, [ 719.660980] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] for vif in network_info: [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return self._sync_wrapper(fn, *args, **kwargs) [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.wait() [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self[:] = self._gt.wait() [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return self._exit_event.wait() [ 719.661406] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 719.661406] env[61974]: ERROR 
nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] current.throw(*self._exc) [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] result = function(*args, **kwargs) [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return func(*args, **kwargs) [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise e [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] nwinfo = self.network_api.allocate_for_instance( [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] created_port_ids = self._update_ports_for_instance( [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] with excutils.save_and_reraise_exception(): [ 719.661722] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.force_reraise() [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise self.value [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] updated_port = self._update_port( [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] _ensure_no_port_binding_failure(port) [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise exception.PortBindingFailed(port_id=port['id']) [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. [ 719.662063] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] [ 719.662063] env[61974]: INFO nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Terminating instance [ 719.665209] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.854366] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378940, 'name': PowerOffVM_Task, 'duration_secs': 0.229168} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.854658] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 719.854854] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.855621] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7c55dc-586c-48c8-82d6-14bc1f886426 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.862829] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 719.863088] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fea8e3d8-74e5-4398-b3ba-44640c99e691 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.888145] env[61974]: DEBUG nova.network.neutron [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.892334] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 719.892533] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 719.892711] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Deleting the datastore file [datastore1] 93a736b5-5423-4378-8b0c-73a0c46414ca {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 719.892967] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e23ab022-09a5-4ee8-9e38-b12e01f2585a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.900953] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 719.900953] env[61974]: value = "task-1378942" [ 719.900953] env[61974]: _type = "Task" [ 719.900953] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.911938] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.968878] env[61974]: DEBUG nova.network.neutron [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.993163] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.102478] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.199563] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.415566] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.355842} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.415835] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 720.416033] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 720.416223] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 720.474025] env[61974]: DEBUG oslo_concurrency.lockutils [req-be3a2c77-14e9-4327-b4ab-16ab421cefe2 req-ec253b87-b869-4af2-b634-52ed87ee54ae service nova] Releasing lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.474025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquired lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.474198] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.704856] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Releasing lock "refresh_cache-9d599717-0bda-4996-89d8-c41ce089eaac" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.705183] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] 
Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 720.705310] env[61974]: DEBUG nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.705480] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.735277] env[61974]: DEBUG nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.003944] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.043149] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b554307-ad5c-4ac9-b59e-800ae1f9e971 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.052287] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2da77c-9b78-4e49-85dc-afa26250eea4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.087698] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a792b5-1a5d-4a28-b055-7740f791d0e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.091811] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.099514] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c96d5f-662b-415a-9088-262797c66d31 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.115170] env[61974]: DEBUG nova.compute.provider_tree [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.243677] env[61974]: DEBUG 
nova.network.neutron [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.394211] env[61974]: DEBUG nova.compute.manager [req-dfb5abee-38a9-4516-aa2d-d8b9292d7b29 req-1f270edb-e1df-41a2-b4d6-89dc3d4ce10c service nova] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Received event network-vif-deleted-43527828-744c-447a-a35e-b7a23fac5978 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.448510] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 721.448756] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.448915] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 721.449101] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.449250] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 721.449393] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 721.449596] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 721.449752] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 721.449988] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 721.450197] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 721.450374] env[61974]: DEBUG nova.virt.hardware [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 721.451240] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c17dec-47fa-43ae-bfcb-d36db4dff475 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.460112] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921ef1e0-8db9-41b2-9a03-9e6ab5ba724a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.475392] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 721.480856] env[61974]: DEBUG oslo.service.loopingcall [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.481104] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 721.481310] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b212396a-c570-4808-8ec2-8e5c7da51e70 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.499030] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.499030] env[61974]: value = "task-1378943" [ 721.499030] env[61974]: _type = "Task" [ 721.499030] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.506434] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378943, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.595228] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Releasing lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.595730] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 721.595981] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 721.596373] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00468ed8-0409-4ba3-8465-b4d3675b9969 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.606801] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cf4f2b-5919-4df3-ab5d-92697f6d1b33 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.618376] env[61974]: DEBUG nova.scheduler.client.report [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 721.637028] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ccc4d6d9-979a-468a-9b7a-4633662c4052 could not be found. 
[ 721.637295] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.637482] env[61974]: INFO nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Took 0.04 seconds to destroy the instance on the hypervisor. [ 721.637898] env[61974]: DEBUG oslo.service.loopingcall [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.638728] env[61974]: DEBUG nova.compute.manager [-] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.638824] env[61974]: DEBUG nova.network.neutron [-] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.660530] env[61974]: DEBUG nova.network.neutron [-] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.746079] env[61974]: INFO nova.compute.manager [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 9d599717-0bda-4996-89d8-c41ce089eaac] Took 1.04 seconds to deallocate network for instance. [ 722.009311] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378943, 'name': CreateVM_Task, 'duration_secs': 0.279367} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.009589] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.009959] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.010143] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.010452] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 722.010694] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9fa43e-615b-4ea7-a11c-2ae3707db091 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.015518] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 722.015518] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52dbbb4d-13e3-25a9-fa6f-e76583911e77" [ 722.015518] env[61974]: _type = "Task" [ 722.015518] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.023593] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dbbb4d-13e3-25a9-fa6f-e76583911e77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.123309] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.123872] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 722.126804] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.115s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.162878] env[61974]: DEBUG nova.network.neutron [-] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.529846] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dbbb4d-13e3-25a9-fa6f-e76583911e77, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.530231] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.530540] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 722.530786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.531046] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.531287] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 722.531613] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-218247f1-2af0-423d-96e6-2bccb525f1bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.541454] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 722.541673] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 722.542467] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb8b3249-53a6-40cf-993e-737d9d040d48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.549396] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 722.549396] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e652d4-6a27-68d4-a208-bc1a553a0ea5" [ 722.549396] env[61974]: _type = "Task" [ 722.549396] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.560403] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e652d4-6a27-68d4-a208-bc1a553a0ea5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.638743] env[61974]: DEBUG nova.compute.utils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.640310] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 722.640474] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.665193] env[61974]: INFO nova.compute.manager [-] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Took 1.03 seconds to deallocate network for instance. 
[ 722.673180] env[61974]: DEBUG nova.compute.claims [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 722.673609] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.722674] env[61974]: DEBUG nova.policy [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.792818] env[61974]: INFO nova.scheduler.client.report [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Deleted allocations for instance 9d599717-0bda-4996-89d8-c41ce089eaac [ 723.061663] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e652d4-6a27-68d4-a208-bc1a553a0ea5, 'name': SearchDatastore_Task, 'duration_secs': 0.008519} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.064978] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f42ca30-e78a-4304-ad95-cf8868dd2c9f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.071729] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 723.071729] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b09247-bd79-9e64-b7d3-6d7143aaccdf" [ 723.071729] env[61974]: _type = "Task" [ 723.071729] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.076903] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88b5351-d406-4bb2-8a61-65e20ae6dc95 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.088449] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b09247-bd79-9e64-b7d3-6d7143aaccdf, 'name': SearchDatastore_Task, 'duration_secs': 0.009801} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.090260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.090540] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.090846] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74979b8d-19ce-4aaf-89dc-d4536956eb0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.093417] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd516e2-2de9-4785-8edf-481d678566df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.131615] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cad8372-5ab5-420c-9dc0-e22dfc4e0437 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.134315] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 723.134315] env[61974]: value = "task-1378944" [ 723.134315] env[61974]: _type = "Task" [ 723.134315] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.141419] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Successfully created port: 24fcf52e-4a33-4aaf-b26b-f2df48d0f409 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.145343] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7d47de-4a73-4d98-a0d7-eb8542e84f21 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.150881] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 723.156978] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.173163] env[61974]: DEBUG nova.compute.provider_tree [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.306113] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be87e155-0fc6-4eed-b620-9f96225ef7c1 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "9d599717-0bda-4996-89d8-c41ce089eaac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.054s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.667648] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378944, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491309} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.668149] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 723.668367] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 723.668620] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0425b418-c1ff-4fca-be40-17cf84f8b836 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.676212] env[61974]: DEBUG nova.scheduler.client.report [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.684111] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 723.684111] env[61974]: value = "task-1378945" [ 723.684111] env[61974]: _type = "Task" [ 723.684111] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.696745] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.812070] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 724.173023] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 724.184977] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.058s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.190024] env[61974]: ERROR nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Traceback (most recent call last): [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.driver.spawn(context, instance, image_meta, [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] vm_ref = self.build_virtual_machine(instance, [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.190024] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] for vif in network_info: [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self._sync_wrapper(fn, *args, **kwargs) [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.wait() [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 724.190414] env[61974]: ERROR 
nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self[:] = self._gt.wait() [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self._exit_event.wait() [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] result = hub.switch() [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.190414] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return self.greenlet.switch() [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] result = function(*args, **kwargs) [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] return func(*args, **kwargs) [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise e [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] nwinfo = self.network_api.allocate_for_instance( [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] created_port_ids = self._update_ports_for_instance( [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] with excutils.save_and_reraise_exception(): [ 724.190777] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] self.force_reraise() [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise self.value [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] updated_port = self._update_port( [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] _ensure_no_port_binding_failure(port) [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] raise exception.PortBindingFailed(port_id=port['id']) [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] nova.exception.PortBindingFailed: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. [ 724.191150] env[61974]: ERROR nova.compute.manager [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] [ 724.191458] env[61974]: DEBUG nova.compute.utils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 724.191458] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.705s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.192570] env[61974]: INFO nova.compute.claims [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.199902] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Build of instance 8a00b16d-8274-4728-920b-a30e95fa4048 was re-scheduled: Binding failed for port 95b37ae0-fbac-4f29-962e-50cd9dfdca8b, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 724.200383] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 724.200611] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.200752] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.200905] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.218753] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069211} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.220983] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.221227] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.221383] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.221567] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.221712] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.221855] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.222069] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.222229] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.222421] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 
tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.222545] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.222712] env[61974]: DEBUG nova.virt.hardware [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.226874] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.226874] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3e9e07-f51e-4b1b-9910-30a9f7957a94 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.228277] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce091da-47aa-4690-988b-d41a104211ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.237191] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.247244] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b94c5b-e47d-4feb-9183-392ec92f3c67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.263374] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.265105] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20e3d872-4cbc-455c-b092-52ed117c0b63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.297302] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 724.297302] env[61974]: value = "task-1378946" [ 724.297302] env[61974]: _type = "Task" [ 724.297302] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.351241] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.417921] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.472152] env[61974]: ERROR nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. [ 724.472152] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.472152] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.472152] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.472152] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.472152] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.472152] env[61974]: ERROR nova.compute.manager raise self.value [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.472152] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 724.472152] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.472152] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 724.472621] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.472621] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 724.472621] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. 
[ 724.472621] env[61974]: ERROR nova.compute.manager [ 724.472621] env[61974]: Traceback (most recent call last): [ 724.472621] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 724.472621] env[61974]: listener.cb(fileno) [ 724.472621] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.472621] env[61974]: result = function(*args, **kwargs) [ 724.472621] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.472621] env[61974]: return func(*args, **kwargs) [ 724.472621] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.472621] env[61974]: raise e [ 724.472621] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.472621] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 724.472621] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.472621] env[61974]: created_port_ids = self._update_ports_for_instance( [ 724.472621] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.472621] env[61974]: with excutils.save_and_reraise_exception(): [ 724.472621] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.472621] env[61974]: self.force_reraise() [ 724.472621] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.472621] env[61974]: raise self.value [ 724.472621] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.472621] env[61974]: updated_port = self._update_port( [ 724.472621] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.472621] env[61974]: _ensure_no_port_binding_failure(port) [ 724.472621] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.472621] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 724.473431] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. [ 724.473431] env[61974]: Removing descriptor: 21 [ 724.473431] env[61974]: ERROR nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. 
[ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Traceback (most recent call last): [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] yield resources [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.driver.spawn(context, instance, image_meta, [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.473431] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] vm_ref = self.build_virtual_machine(instance, [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] for vif in network_info: [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self._sync_wrapper(fn, *args, **kwargs) [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.wait() [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self[:] = self._gt.wait() [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self._exit_event.wait() [ 724.473717] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.474045] env[61974]: ERROR 
nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] result = hub.switch() [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self.greenlet.switch() [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] result = function(*args, **kwargs) [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return func(*args, **kwargs) [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise e [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] nwinfo = self.network_api.allocate_for_instance( [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.474045] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] created_port_ids = self._update_ports_for_instance( [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] with excutils.save_and_reraise_exception(): [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.force_reraise() [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise self.value [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] updated_port = self._update_port( [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.474355] 
env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] _ensure_no_port_binding_failure(port) [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.474355] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise exception.PortBindingFailed(port_id=port['id']) [ 724.477680] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. [ 724.477680] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] [ 724.477680] env[61974]: INFO nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Terminating instance [ 724.477680] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.477680] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.477680] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.560270] env[61974]: DEBUG nova.compute.manager [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Received event network-changed-24fcf52e-4a33-4aaf-b26b-f2df48d0f409 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 724.560530] env[61974]: DEBUG nova.compute.manager [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Refreshing instance network info cache due to event network-changed-24fcf52e-4a33-4aaf-b26b-f2df48d0f409. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 724.560756] env[61974]: DEBUG oslo_concurrency.lockutils [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] Acquiring lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.809086] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378946, 'name': ReconfigVM_Task, 'duration_secs': 0.314825} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.809407] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 93a736b5-5423-4378-8b0c-73a0c46414ca/93a736b5-5423-4378-8b0c-73a0c46414ca.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 724.810049] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52b27b8e-09eb-445f-8fad-e755cdbaf37b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.817838] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 724.817838] env[61974]: value = "task-1378947" [ 724.817838] env[61974]: _type = "Task" [ 724.817838] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.827162] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378947, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.921388] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-8a00b16d-8274-4728-920b-a30e95fa4048" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.921388] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 724.921388] env[61974]: DEBUG nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.921388] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 724.952806] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.989117] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquiring lock "f290da20-8a42-42f5-8902-136e434d29d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.989117] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "f290da20-8a42-42f5-8902-136e434d29d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.009327] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.109219] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.331896] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378947, 'name': Rename_Task, 'duration_secs': 0.145968} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.331896] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 725.332218] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68a0f785-dabf-4884-976c-45cbf6cbaac0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.340383] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Waiting for the task: (returnval){ [ 725.340383] env[61974]: value = "task-1378948" [ 725.340383] env[61974]: _type = "Task" [ 725.340383] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.348954] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378948, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.459020] env[61974]: DEBUG nova.network.neutron [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.612015] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.612617] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 725.612939] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.613658] env[61974]: DEBUG oslo_concurrency.lockutils [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] Acquired lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.614324] env[61974]: DEBUG nova.network.neutron [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Refreshing network info cache for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.615565] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae896054-7719-425d-8167-adbc27ffe0dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.626407] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec803116-7eb5-47fe-a65f-c34e204a5563 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.661957] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c3eb4869-0bde-4398-bf34-3ee6073174e5 could not be found. 
[ 725.662256] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 725.662462] env[61974]: INFO nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 725.662725] env[61974]: DEBUG oslo.service.loopingcall [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.662959] env[61974]: DEBUG nova.compute.manager [-] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.663065] env[61974]: DEBUG nova.network.neutron [-] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.694453] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1e5dbd-1f06-4aab-9e41-c79636ad5006 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.707021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1933ffac-83d8-4c67-a4c6-640455b0703e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.708321] env[61974]: DEBUG nova.network.neutron [-] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.746707] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116d9c88-2720-4fc1-bcd6-e2e91fc966b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.755995] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c2dc57-d9c1-4c28-8fda-e6d998b032b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.770630] env[61974]: DEBUG nova.compute.provider_tree [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.851640] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378948, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.963110] env[61974]: INFO nova.compute.manager [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 8a00b16d-8274-4728-920b-a30e95fa4048] Took 1.04 seconds to deallocate network for instance. [ 726.145951] env[61974]: DEBUG nova.network.neutron [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.213355] env[61974]: DEBUG nova.network.neutron [-] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.264547] env[61974]: DEBUG nova.network.neutron [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.274085] env[61974]: DEBUG nova.scheduler.client.report [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.353810] env[61974]: DEBUG oslo_vmware.api [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Task: {'id': task-1378948, 'name': PowerOnVM_Task, 'duration_secs': 0.860295} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.354150] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 726.358021] env[61974]: DEBUG nova.compute.manager [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 726.358021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b978a9d-4dee-48cf-abd7-2c6cfea48629 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.626968] env[61974]: DEBUG nova.compute.manager [req-52af5af5-1c88-494a-bc76-1a8538aaafef req-7b32a939-d4b9-470b-9559-a8e5172213ed service nova] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Received event network-vif-deleted-24fcf52e-4a33-4aaf-b26b-f2df48d0f409 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 726.718817] env[61974]: INFO nova.compute.manager [-] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Took 1.06 seconds to deallocate network for instance. [ 726.722271] env[61974]: DEBUG nova.compute.claims [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 726.722465] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.766769] env[61974]: DEBUG oslo_concurrency.lockutils [req-6bf896eb-e453-4ff0-9c38-6aca1a94622f req-3fcfea24-7b91-4eb3-ace5-42a6b4f6ce67 service nova] Releasing lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.777997] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.778884] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 726.781851] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.231s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.875122] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.004295] env[61974]: INFO nova.scheduler.client.report [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted allocations for instance 8a00b16d-8274-4728-920b-a30e95fa4048 [ 727.231165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "93a736b5-5423-4378-8b0c-73a0c46414ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.231465] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.232030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "93a736b5-5423-4378-8b0c-73a0c46414ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.232133] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.232308] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.234517] env[61974]: INFO nova.compute.manager [None 
req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Terminating instance [ 727.236438] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "refresh_cache-93a736b5-5423-4378-8b0c-73a0c46414ca" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.236598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquired lock "refresh_cache-93a736b5-5423-4378-8b0c-73a0c46414ca" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.236763] env[61974]: DEBUG nova.network.neutron [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.287851] env[61974]: DEBUG nova.compute.utils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.292122] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 727.292249] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.339717] env[61974]: DEBUG nova.policy [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39f75781c80749919d51e7c589df4ff4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1910786adff94672b078f8e78dec04ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.514759] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a66a35f5-e342-44d5-9013-3ee1d802dd25 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "8a00b16d-8274-4728-920b-a30e95fa4048" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.548s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.691528] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Successfully created port: e1fe7370-4293-451f-9204-d8efd7c9927b {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.699977] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069f4932-0e2c-414a-a356-bb2fb27b1820 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.709058] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c07248-a68e-4701-929d-047e32aa28b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.743246] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef72dd19-9324-4cca-9efa-64b4df7cdfa4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.751850] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3575a6-cfff-4e7c-afc3-e966e223114e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.766330] env[61974]: DEBUG nova.compute.provider_tree [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.768285] env[61974]: DEBUG nova.network.neutron [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.795034] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 727.872422] env[61974]: DEBUG nova.network.neutron [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.017860] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 728.271838] env[61974]: DEBUG nova.scheduler.client.report [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 728.278379] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "0de509f4-48d8-43ae-9551-80ae414d7c8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.278695] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "0de509f4-48d8-43ae-9551-80ae414d7c8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.376614] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Releasing lock "refresh_cache-93a736b5-5423-4378-8b0c-73a0c46414ca" 
{{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.377163] env[61974]: DEBUG nova.compute.manager [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 728.377364] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.378354] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c856798-0163-4161-ae3c-3eb6dbc3db8a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.389985] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.391023] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59168029-040f-437c-8cad-f7d1341aab3f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.398652] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 728.398652] env[61974]: value = "task-1378949" [ 728.398652] env[61974]: _type = "Task" [ 728.398652] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.413260] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378949, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.543129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.656572] env[61974]: DEBUG nova.compute.manager [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Received event network-changed-e1fe7370-4293-451f-9204-d8efd7c9927b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 728.656744] env[61974]: DEBUG nova.compute.manager [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Refreshing instance network info cache due to event network-changed-e1fe7370-4293-451f-9204-d8efd7c9927b. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 728.656999] env[61974]: DEBUG oslo_concurrency.lockutils [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] Acquiring lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.657159] env[61974]: DEBUG oslo_concurrency.lockutils [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] Acquired lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.657321] env[61974]: DEBUG nova.network.neutron [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Refreshing network info cache for port e1fe7370-4293-451f-9204-d8efd7c9927b {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.779233] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.997s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.779952] env[61974]: ERROR nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. 
[ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Traceback (most recent call last): [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.driver.spawn(context, instance, image_meta, [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] vm_ref = self.build_virtual_machine(instance, [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.779952] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] for vif in network_info: [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self._sync_wrapper(fn, *args, **kwargs) [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.wait() [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self[:] = self._gt.wait() [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self._exit_event.wait() [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] result = hub.switch() [ 728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
728.780333] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return self.greenlet.switch() [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] result = function(*args, **kwargs) [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] return func(*args, **kwargs) [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise e [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] nwinfo = self.network_api.allocate_for_instance( [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] created_port_ids = self._update_ports_for_instance( [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] with excutils.save_and_reraise_exception(): [ 728.780721] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] self.force_reraise() [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise self.value [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] updated_port = self._update_port( [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] _ensure_no_port_binding_failure(port) [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] raise exception.PortBindingFailed(port_id=port['id']) [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] nova.exception.PortBindingFailed: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. [ 728.781127] env[61974]: ERROR nova.compute.manager [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] [ 728.781433] env[61974]: DEBUG nova.compute.utils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 728.782258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.334s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.785282] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Build of instance 6f9690ab-8218-4b2c-ba36-682ea7398209 was re-scheduled: Binding failed for port 47bce457-b91f-4d3d-88fb-9fdde6f7944a, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 728.785714] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 728.785930] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquiring lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.786889] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Acquired lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.786889] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.806303] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.838528] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.838789] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.838975] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.839376] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.839612] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.839781] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.839987] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.840168] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 728.840336] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.840498] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.840669] env[61974]: DEBUG nova.virt.hardware [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.841554] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb6bd86-eb91-41dc-ba16-1b8dddd0ed76 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.851230] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f818ea8-4ecd-4775-af0b-c6d1ca814766 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.856132] env[61974]: ERROR nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. 
[ 728.856132] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.856132] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.856132] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.856132] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.856132] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.856132] env[61974]: ERROR nova.compute.manager raise self.value [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.856132] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 728.856132] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.856132] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 728.856565] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.856565] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 728.856565] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. 
[ 728.856565] env[61974]: ERROR nova.compute.manager [ 728.856565] env[61974]: Traceback (most recent call last): [ 728.856565] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 728.856565] env[61974]: listener.cb(fileno) [ 728.856565] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.856565] env[61974]: result = function(*args, **kwargs) [ 728.856565] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.856565] env[61974]: return func(*args, **kwargs) [ 728.856565] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.856565] env[61974]: raise e [ 728.856565] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.856565] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 728.856565] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.856565] env[61974]: created_port_ids = self._update_ports_for_instance( [ 728.856565] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.856565] env[61974]: with excutils.save_and_reraise_exception(): [ 728.856565] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.856565] env[61974]: self.force_reraise() [ 728.856565] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.856565] env[61974]: raise self.value [ 728.856565] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.856565] env[61974]: updated_port = self._update_port( [ 728.856565] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.856565] env[61974]: _ensure_no_port_binding_failure(port) [ 728.856565] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 728.856565] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 728.857520] env[61974]: nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. [ 728.857520] env[61974]: Removing descriptor: 21 [ 728.868336] env[61974]: ERROR nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. 
[ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Traceback (most recent call last): [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] yield resources [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.driver.spawn(context, instance, image_meta, [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] vm_ref = self.build_virtual_machine(instance, [ 728.868336] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] for vif in network_info: [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return self._sync_wrapper(fn, *args, **kwargs) [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.wait() [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self[:] = self._gt.wait() [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return self._exit_event.wait() [ 728.868822] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.868822] env[61974]: ERROR 
nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] current.throw(*self._exc) [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] result = function(*args, **kwargs) [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return func(*args, **kwargs) [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise e [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] nwinfo = self.network_api.allocate_for_instance( [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] created_port_ids = self._update_ports_for_instance( [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] with excutils.save_and_reraise_exception(): [ 728.869152] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.force_reraise() [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise self.value [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] updated_port = self._update_port( [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] _ensure_no_port_binding_failure(port) [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise exception.PortBindingFailed(port_id=port['id']) [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. [ 728.869454] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] [ 728.869454] env[61974]: INFO nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Terminating instance [ 728.870790] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquiring lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.908991] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378949, 'name': PowerOffVM_Task, 'duration_secs': 0.207184} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.909297] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 728.909466] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 728.909715] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4344f77c-27df-47a4-8a53-1ea4f9eefafc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.939596] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 728.939804] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 728.939974] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleting the datastore file [datastore2] 
93a736b5-5423-4378-8b0c-73a0c46414ca {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.940253] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e8359a3-4ea7-42e5-a9ee-b82bd35f142d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.949512] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for the task: (returnval){ [ 728.949512] env[61974]: value = "task-1378951" [ 728.949512] env[61974]: _type = "Task" [ 728.949512] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.956889] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378951, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.176272] env[61974]: DEBUG nova.network.neutron [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.278653] env[61974]: DEBUG nova.network.neutron [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.313795] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.381455] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.461722] env[61974]: DEBUG oslo_vmware.api [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Task: {'id': task-1378951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11215} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.464138] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.464467] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.464902] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.464902] env[61974]: INFO nova.compute.manager [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Took 1.09 seconds to destroy the instance on the hypervisor. [ 729.465069] env[61974]: DEBUG oslo.service.loopingcall [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.466138] env[61974]: DEBUG nova.compute.manager [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.466259] env[61974]: DEBUG nova.network.neutron [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.481015] env[61974]: DEBUG nova.network.neutron [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.754874] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad5b7db-2504-4b4a-a0a0-4796685375a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.763131] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad774fd-1ac5-42aa-99d2-67240d2ff0ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.793432] env[61974]: DEBUG oslo_concurrency.lockutils [req-c6e3b2cd-82df-4806-8b82-6fe9ce5b3db8 req-c9bc663e-91e6-4fbf-a76d-36bdaf957ef6 service nova] Releasing lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.794039] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquired lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.794239] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.795813] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c26794-e279-4472-87a1-24ab6b12b43a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.804281] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7792b2f9-8574-4fec-996e-ab0d62d04348 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.818605] env[61974]: DEBUG nova.compute.provider_tree [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.881289] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Releasing lock "refresh_cache-6f9690ab-8218-4b2c-ba36-682ea7398209" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.881540] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 729.881701] env[61974]: DEBUG nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.881862] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.897415] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.984112] env[61974]: DEBUG nova.network.neutron [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.322011] env[61974]: DEBUG nova.scheduler.client.report [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 730.325786] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.400477] env[61974]: DEBUG nova.network.neutron [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.417629] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.451630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "5f62ae7a-126f-42ce-9579-57ca02c871d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.451831] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "5f62ae7a-126f-42ce-9579-57ca02c871d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.486938] env[61974]: INFO nova.compute.manager [-] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Took 1.02 seconds to deallocate network for instance. [ 730.681585] env[61974]: DEBUG nova.compute.manager [req-7d3b641b-8313-449f-9939-187256288487 req-555035d3-72f1-4682-b1cd-0cb59f08302d service nova] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Received event network-vif-deleted-e1fe7370-4293-451f-9204-d8efd7c9927b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 730.831271] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.047s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.831271] env[61974]: ERROR nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. 
[ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Traceback (most recent call last): [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.driver.spawn(context, instance, image_meta, [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.831271] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] vm_ref = self.build_virtual_machine(instance, [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] for vif in network_info: [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self._sync_wrapper(fn, *args, **kwargs) [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.wait() [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self[:] = self._gt.wait() [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self._exit_event.wait() [ 730.831844] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] result = hub.switch() [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return self.greenlet.switch() [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] result = function(*args, **kwargs) [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] return func(*args, **kwargs) [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise e [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] nwinfo = self.network_api.allocate_for_instance( [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 730.832225] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] created_port_ids = self._update_ports_for_instance( [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] with excutils.save_and_reraise_exception(): [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] self.force_reraise() [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise self.value [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] updated_port = self._update_port( [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] _ensure_no_port_binding_failure(port) [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 730.832591] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] raise exception.PortBindingFailed(port_id=port['id']) [ 730.832929] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] nova.exception.PortBindingFailed: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. [ 730.832929] env[61974]: ERROR nova.compute.manager [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] [ 730.832929] env[61974]: DEBUG nova.compute.utils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 730.837020] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.181s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.837020] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Build of instance 26cb158a-04fa-4031-b099-34dfe8a762cc was re-scheduled: Binding failed for port 51a2c2ab-d715-49cf-a9e0-431b116e7551, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 730.837020] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 730.837631] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.837917] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquired lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.838225] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.903571] env[61974]: INFO nova.compute.manager [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] [instance: 6f9690ab-8218-4b2c-ba36-682ea7398209] Took 1.02 seconds to deallocate network for instance. [ 730.919917] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Releasing lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.920443] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.920768] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.920976] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a75975f-bcc6-45d0-a024-1b8db7359470 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.933096] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e49addc-9274-4c40-82b8-531ab4eb1ef9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.960890] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c could not be found. [ 730.961154] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.961335] env[61974]: INFO nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 730.961577] env[61974]: DEBUG oslo.service.loopingcall [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.961813] env[61974]: DEBUG nova.compute.manager [-] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.961917] env[61974]: DEBUG nova.network.neutron [-] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.977238] env[61974]: DEBUG nova.network.neutron [-] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.992933] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.361138] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.422101] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.481239] env[61974]: DEBUG nova.network.neutron [-] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.683205] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a342bf3-083f-44b9-b253-f9114e3f472f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.691526] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16944958-aecb-4e64-aafc-c22a3596c072 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.720902] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddadc94-ada9-4425-85b7-88df08c1afb2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.728822] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba042e-8285-4a4a-ba52-48ac3a9fdeb0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.744137] env[61974]: DEBUG nova.compute.provider_tree [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.927178] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Releasing lock "refresh_cache-26cb158a-04fa-4031-b099-34dfe8a762cc" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.927400] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 
tempest-ListImageFiltersTestJSON-2033042234-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 731.927584] env[61974]: DEBUG nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.927864] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.932233] env[61974]: INFO nova.scheduler.client.report [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Deleted allocations for instance 6f9690ab-8218-4b2c-ba36-682ea7398209 [ 731.945642] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.983397] env[61974]: INFO nova.compute.manager [-] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Took 1.02 seconds to deallocate network for instance. 
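Every PortBindingFailed traceback captured above terminates in nova/network/neutron.py, in _ensure_no_port_binding_failure, which gives up on the port as soon as Neutron reports its binding:vif_type as 'binding_failed'. The following is a minimal, hedged sketch of that check, runnable outside Nova: the PortBindingFailed class below is a stand-in for nova.exception.PortBindingFailed, and the port dict only mirrors the fields visible in this log, not the full Neutron port body.

# Sketch of the check the tracebacks above keep hitting (stand-in types, not the Nova source).
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Reject the port if Neutron could not bind it to any host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # A port shaped like the one Neutron failed to bind in the log above.
    port = {'id': 'e1fe7370-4293-451f-9204-d8efd7c9927b',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message format as the ERROR lines in this log

When the exception propagates out of _allocate_network_async, the compute manager aborts the claim and re-schedules the build, which is exactly the "was re-scheduled: Binding failed for port ..." sequence recorded in the entries that follow.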
[ 731.985903] env[61974]: DEBUG nova.compute.claims [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.986903] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.247594] env[61974]: DEBUG nova.scheduler.client.report [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.439518] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce6f1b4b-66dd-4d6d-99bf-c20bbe699d12 tempest-ServersAdminNegativeTestJSON-2084398908 tempest-ServersAdminNegativeTestJSON-2084398908-project-member] Lock "6f9690ab-8218-4b2c-ba36-682ea7398209" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.561s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.448822] env[61974]: DEBUG nova.network.neutron [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.752465] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.919s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.753088] env[61974]: ERROR nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. 
[ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Traceback (most recent call last): [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.driver.spawn(context, instance, image_meta, [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] vm_ref = self.build_virtual_machine(instance, [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.753088] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] for vif in network_info: [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return self._sync_wrapper(fn, *args, **kwargs) [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.wait() [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self[:] = self._gt.wait() [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return self._exit_event.wait() [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] current.throw(*self._exc) [ 732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
732.753439] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] result = function(*args, **kwargs) [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] return func(*args, **kwargs) [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise e [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] nwinfo = self.network_api.allocate_for_instance( [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] created_port_ids = self._update_ports_for_instance( [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] with excutils.save_and_reraise_exception(): [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] self.force_reraise() [ 732.753765] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise self.value [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] updated_port = self._update_port( [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] _ensure_no_port_binding_failure(port) [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] raise exception.PortBindingFailed(port_id=port['id']) [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] nova.exception.PortBindingFailed: Binding failed for 
port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. [ 732.754142] env[61974]: ERROR nova.compute.manager [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] [ 732.754142] env[61974]: DEBUG nova.compute.utils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 732.755386] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.139s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.756928] env[61974]: INFO nova.compute.claims [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.760051] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Build of instance 67ca9fb2-9ca0-4fca-956e-961d5011df35 was re-scheduled: Binding failed for port 66718558-8503-4606-adc8-14e9b6d34c45, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 732.760196] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 732.760368] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquiring lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.760532] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Acquired lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.760693] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.942148] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 732.951762] env[61974]: INFO nova.compute.manager [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 26cb158a-04fa-4031-b099-34dfe8a762cc] Took 1.02 seconds to deallocate network for instance. [ 733.284109] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.349155] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.465623] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.851850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Releasing lock "refresh_cache-67ca9fb2-9ca0-4fca-956e-961d5011df35" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.852099] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 733.852259] env[61974]: DEBUG nova.compute.manager [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.853063] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.874651] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.980591] env[61974]: INFO nova.scheduler.client.report [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Deleted allocations for instance 26cb158a-04fa-4031-b099-34dfe8a762cc [ 734.141607] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa0656c-1200-4c67-8080-2611497207db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.149754] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ebb524-217d-4dc2-8d10-c4e9be352bcc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.181311] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cf21f4-2163-4387-bad8-92aeae354caa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.190380] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e994aaf-798b-402a-99c9-1ab88d289ebe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.205741] env[61974]: DEBUG nova.compute.provider_tree [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.377262] env[61974]: DEBUG nova.network.neutron [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.490391] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65dec34f-52d9-47ca-a0c3-47df9044802f tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "26cb158a-04fa-4031-b099-34dfe8a762cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.601s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.709326] env[61974]: DEBUG nova.scheduler.client.report [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.880387] env[61974]: INFO nova.compute.manager [None 
req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] [instance: 67ca9fb2-9ca0-4fca-956e-961d5011df35] Took 1.03 seconds to deallocate network for instance. [ 734.993613] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 735.214144] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.214735] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.217342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.605s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.217553] env[61974]: DEBUG nova.objects.instance [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 735.518336] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.723458] env[61974]: DEBUG nova.compute.utils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.735312] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.735312] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.826029] env[61974]: DEBUG nova.policy [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '899c602715264d5394220eecd463eba7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7a05cbd31484dcfa56cbe6de921d590', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 735.913297] env[61974]: INFO nova.scheduler.client.report [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 tempest-ListImageFiltersTestJSON-2033042234-project-member] Deleted allocations for instance 67ca9fb2-9ca0-4fca-956e-961d5011df35 [ 736.235185] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.241341] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4cb9928e-98f3-4eef-9680-4ce88817e879 tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.241341] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.250s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.244236] env[61974]: INFO nova.compute.claims [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.355382] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Successfully created port: b352c901-672c-42ad-a6f4-9e0dd6677a7d {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.428111] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65b127b4-0034-4897-9201-d34b2ff60391 tempest-ListImageFiltersTestJSON-2033042234 
tempest-ListImageFiltersTestJSON-2033042234-project-member] Lock "67ca9fb2-9ca0-4fca-956e-961d5011df35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.627s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.931588] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.252277] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 737.300202] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.300515] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.301071] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.301071] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.301207] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.301360] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 
tempest-ServerMetadataTestJSON-2015747953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.301563] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.301816] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.301880] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.302299] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.302677] env[61974]: DEBUG nova.virt.hardware [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.303831] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734d87e0-998b-4435-bc1d-4b88483dbc3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.313724] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab99e1b-c957-42a3-bd7b-8d6968a21f63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.459084] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.731507] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02810b8b-b9df-442c-82e2-5ce86db6371a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.740762] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67585e41-4cc7-4171-84c6-e41d0bfb69cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.748985] env[61974]: DEBUG 
nova.compute.manager [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Received event network-changed-b352c901-672c-42ad-a6f4-9e0dd6677a7d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.749332] env[61974]: DEBUG nova.compute.manager [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Refreshing instance network info cache due to event network-changed-b352c901-672c-42ad-a6f4-9e0dd6677a7d. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 737.749549] env[61974]: DEBUG oslo_concurrency.lockutils [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] Acquiring lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.749772] env[61974]: DEBUG oslo_concurrency.lockutils [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] Acquired lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.752781] env[61974]: DEBUG nova.network.neutron [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Refreshing network info cache for port b352c901-672c-42ad-a6f4-9e0dd6677a7d {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.785184] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c5cdae-6da8-42f7-8973-1c485ea808d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.796021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a935069d-1622-4371-8c95-59e5587f4967 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.811259] env[61974]: DEBUG nova.compute.provider_tree [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.907813] env[61974]: ERROR nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. 
[ 737.907813] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.907813] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.907813] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.907813] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.907813] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.907813] env[61974]: ERROR nova.compute.manager raise self.value [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.907813] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 737.907813] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.907813] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 737.909341] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.909341] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 737.909341] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. 
[ 737.909341] env[61974]: ERROR nova.compute.manager [ 737.909341] env[61974]: Traceback (most recent call last): [ 737.909341] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 737.909341] env[61974]: listener.cb(fileno) [ 737.909341] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.909341] env[61974]: result = function(*args, **kwargs) [ 737.909341] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.909341] env[61974]: return func(*args, **kwargs) [ 737.909341] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.909341] env[61974]: raise e [ 737.909341] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.909341] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 737.909341] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.909341] env[61974]: created_port_ids = self._update_ports_for_instance( [ 737.909341] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.909341] env[61974]: with excutils.save_and_reraise_exception(): [ 737.909341] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.909341] env[61974]: self.force_reraise() [ 737.909341] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.909341] env[61974]: raise self.value [ 737.909341] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.909341] env[61974]: updated_port = self._update_port( [ 737.909341] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.909341] env[61974]: _ensure_no_port_binding_failure(port) [ 737.909341] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.909341] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 737.910077] env[61974]: nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. [ 737.910077] env[61974]: Removing descriptor: 21 [ 737.910077] env[61974]: ERROR nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. 
[ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Traceback (most recent call last): [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] yield resources [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.driver.spawn(context, instance, image_meta, [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.910077] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] vm_ref = self.build_virtual_machine(instance, [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] for vif in network_info: [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self._sync_wrapper(fn, *args, **kwargs) [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.wait() [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self[:] = self._gt.wait() [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self._exit_event.wait() [ 737.910375] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 737.910698] env[61974]: ERROR 
nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] result = hub.switch() [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self.greenlet.switch() [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] result = function(*args, **kwargs) [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return func(*args, **kwargs) [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise e [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] nwinfo = self.network_api.allocate_for_instance( [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.910698] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] created_port_ids = self._update_ports_for_instance( [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] with excutils.save_and_reraise_exception(): [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.force_reraise() [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise self.value [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] updated_port = self._update_port( [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.911057] 
env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] _ensure_no_port_binding_failure(port) [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.911057] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise exception.PortBindingFailed(port_id=port['id']) [ 737.911368] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. [ 737.911368] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] [ 737.911368] env[61974]: INFO nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Terminating instance [ 737.913086] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquiring lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.274068] env[61974]: DEBUG nova.network.neutron [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.318054] env[61974]: DEBUG nova.scheduler.client.report [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.376544] env[61974]: DEBUG nova.network.neutron [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.522564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquiring lock "4545e438-8784-4911-bf2e-8eb14d38c308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.522564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 
tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "4545e438-8784-4911-bf2e-8eb14d38c308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.824060] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.824060] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 738.826786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.153s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.878103] env[61974]: DEBUG oslo_concurrency.lockutils [req-0beca70c-4e18-4e5e-9f31-1e729f74c8fd req-3ada1a21-8b12-42e5-bbc7-4cb4e3244c85 service nova] Releasing lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.878731] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquired lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.878917] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.331652] env[61974]: DEBUG nova.compute.utils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 739.341262] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 739.341262] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 739.410065] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.437263] env[61974]: DEBUG nova.policy [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6966f95c2e334310bdef5e43881dc3df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6140fc577f7a47e5ad4df35bb8dc96e2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.590994] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.791840] env[61974]: DEBUG nova.compute.manager [req-f7cad4d8-b8e8-4769-a7c1-1dc134c349c3 req-fa4dd6a6-b7e3-46af-91c2-61f2ab6cd2c8 service nova] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Received event network-vif-deleted-b352c901-672c-42ad-a6f4-9e0dd6677a7d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.814251] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88106fae-4701-45fa-9a7c-9f1224f255f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.831638] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1543096c-5f57-4a0a-a1c1-9ac32f2cd20c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.889464] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.896752] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79436f17-27e6-474b-95e6-447d7aa3581a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.909688] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b96af53-f63c-406d-9790-b35c80e911c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.919681] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Successfully created port: b46d1d24-64da-4fe2-bd3e-ed3715b818e7 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.941455] env[61974]: DEBUG nova.compute.provider_tree [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.096520] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Releasing lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.097246] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 740.099974] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.099974] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cef9b6e3-7963-4250-999f-4fb6d352761f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.113618] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9323c903-ca8f-4cff-8a37-bdebc4ae4230 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.139018] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9f781418-6149-4c73-aaa0-20c8cbc8c482 could not be found. 
[ 740.139255] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.139440] env[61974]: INFO nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Took 0.04 seconds to destroy the instance on the hypervisor. [ 740.139689] env[61974]: DEBUG oslo.service.loopingcall [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.139934] env[61974]: DEBUG nova.compute.manager [-] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.140071] env[61974]: DEBUG nova.network.neutron [-] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.168949] env[61974]: DEBUG nova.network.neutron [-] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.449564] env[61974]: DEBUG nova.scheduler.client.report [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.672525] env[61974]: DEBUG nova.network.neutron [-] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.910527] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 740.940957] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.941485] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.941779] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.942126] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.942400] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.942662] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.942979] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.944025] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 740.944025] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 
tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.944025] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.944025] env[61974]: DEBUG nova.virt.hardware [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.944849] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad595ba-6a25-4e27-bcf0-ee29733d7760 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.957715] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4566b8-6871-40a2-a75c-c4bfdcbfb489 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.961853] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.135s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.963112] env[61974]: ERROR nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. 
[ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Traceback (most recent call last): [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.driver.spawn(context, instance, image_meta, [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] vm_ref = self.build_virtual_machine(instance, [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.963112] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] for vif in network_info: [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return self._sync_wrapper(fn, *args, **kwargs) [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.wait() [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self[:] = self._gt.wait() [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return self._exit_event.wait() [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] current.throw(*self._exc) [ 740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
740.963415] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] result = function(*args, **kwargs) [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] return func(*args, **kwargs) [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise e [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] nwinfo = self.network_api.allocate_for_instance( [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] created_port_ids = self._update_ports_for_instance( [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] with excutils.save_and_reraise_exception(): [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] self.force_reraise() [ 740.963725] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise self.value [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] updated_port = self._update_port( [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] _ensure_no_port_binding_failure(port) [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] raise exception.PortBindingFailed(port_id=port['id']) [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] nova.exception.PortBindingFailed: Binding failed for 
port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. [ 740.964094] env[61974]: ERROR nova.compute.manager [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] [ 740.964094] env[61974]: DEBUG nova.compute.utils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.964664] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Build of instance ccc4d6d9-979a-468a-9b7a-4633662c4052 was re-scheduled: Binding failed for port 43527828-744c-447a-a35e-b7a23fac5978, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 740.965067] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 740.965304] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquiring lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.965492] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Acquired lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.965686] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.967113] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.616s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.969467] env[61974]: INFO nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.175025] env[61974]: INFO nova.compute.manager [-] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Took 1.03 seconds to deallocate network for instance. 
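Every traceback in this section bottoms out at nova/network/neutron.py:294, where _ensure_no_port_binding_failure raises the PortBindingFailed that aborts the claim and triggers the re-schedule logged above. A minimal standalone sketch of that check, reconstructed from the traceback rather than copied from the Nova source (the 'binding_failed' sentinel value is an assumption), looks like this:

    # Standalone sketch of the check behind the PortBindingFailed errors above.
    # Not the Nova source; reconstructed from the traceback ending at
    # nova/network/neutron.py:294.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding on the port itself; Nova inspects the
        # updated port and converts that marker into PortBindingFailed, which
        # propagates up through _allocate_network_async and fails the build.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Port dict shaped like the one Neutron returned for the failed binding.
    port = {'id': '43527828-744c-447a-a35e-b7a23fac5978',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # matches the message recorded in the log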
[ 741.177605] env[61974]: DEBUG nova.compute.claims [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 741.177796] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.444940] env[61974]: ERROR nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. [ 741.444940] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.444940] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.444940] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.444940] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.444940] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.444940] env[61974]: ERROR nova.compute.manager raise self.value [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.444940] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 741.444940] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.444940] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 741.445490] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.445490] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 741.445490] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. 
[ 741.445490] env[61974]: ERROR nova.compute.manager [ 741.445490] env[61974]: Traceback (most recent call last): [ 741.445490] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 741.445490] env[61974]: listener.cb(fileno) [ 741.445490] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.445490] env[61974]: result = function(*args, **kwargs) [ 741.445490] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.445490] env[61974]: return func(*args, **kwargs) [ 741.445490] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.445490] env[61974]: raise e [ 741.445490] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.445490] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 741.445490] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.445490] env[61974]: created_port_ids = self._update_ports_for_instance( [ 741.445490] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.445490] env[61974]: with excutils.save_and_reraise_exception(): [ 741.445490] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.445490] env[61974]: self.force_reraise() [ 741.445490] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.445490] env[61974]: raise self.value [ 741.445490] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.445490] env[61974]: updated_port = self._update_port( [ 741.445490] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.445490] env[61974]: _ensure_no_port_binding_failure(port) [ 741.445490] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.445490] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 741.446296] env[61974]: nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. [ 741.446296] env[61974]: Removing descriptor: 21 [ 741.446296] env[61974]: ERROR nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. 
[ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Traceback (most recent call last): [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] yield resources [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.driver.spawn(context, instance, image_meta, [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.446296] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] vm_ref = self.build_virtual_machine(instance, [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] for vif in network_info: [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self._sync_wrapper(fn, *args, **kwargs) [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.wait() [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self[:] = self._gt.wait() [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self._exit_event.wait() [ 741.446660] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 741.447146] env[61974]: ERROR 
nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] result = hub.switch() [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self.greenlet.switch() [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] result = function(*args, **kwargs) [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return func(*args, **kwargs) [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise e [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] nwinfo = self.network_api.allocate_for_instance( [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 741.447146] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] created_port_ids = self._update_ports_for_instance( [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] with excutils.save_and_reraise_exception(): [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.force_reraise() [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise self.value [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] updated_port = self._update_port( [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.447472] 
env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] _ensure_no_port_binding_failure(port) [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.447472] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise exception.PortBindingFailed(port_id=port['id']) [ 741.447766] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. [ 741.447766] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] [ 741.447766] env[61974]: INFO nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Terminating instance [ 741.451928] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquiring lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.451928] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquired lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.451928] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.488519] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.557960] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.657896] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "e9309651-2fcb-40ad-babb-950042fe68f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.658386] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "e9309651-2fcb-40ad-babb-950042fe68f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.681651] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "b2d442b2-1927-481c-a232-8514444004a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.683326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "b2d442b2-1927-481c-a232-8514444004a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.814014] env[61974]: DEBUG nova.compute.manager [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Received event network-changed-b46d1d24-64da-4fe2-bd3e-ed3715b818e7 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.814313] env[61974]: DEBUG nova.compute.manager [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Refreshing instance network info cache due to event network-changed-b46d1d24-64da-4fe2-bd3e-ed3715b818e7. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 741.814551] env[61974]: DEBUG oslo_concurrency.lockutils [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] Acquiring lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.971507] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.035219] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.060778] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Releasing lock "refresh_cache-ccc4d6d9-979a-468a-9b7a-4633662c4052" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.061034] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 742.061206] env[61974]: DEBUG nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.061380] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.077142] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.433016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88f6c7f-0ebe-40e3-b34d-312080826800 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.439213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0b0675-8d60-4cf2-9b1f-4def6fbfb011 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.468507] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c87c4fc-87f9-4e28-a72f-d7bc9c3cf59f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.475778] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f923d726-386f-44ab-86d5-7a87a6c32489 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.490095] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.537589] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Releasing lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.538064] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.538261] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.538570] env[61974]: DEBUG oslo_concurrency.lockutils [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] Acquired lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.538738] env[61974]: DEBUG nova.network.neutron [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Refreshing network info cache for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.539858] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43fd3ad9-1975-4219-8dde-27fb66e07b0f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.548794] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d73203-d109-4333-ab77-48e0c9f825b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.572814] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff could not be found. [ 742.573060] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.573247] env[61974]: INFO nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Took 0.03 seconds to destroy the instance on the hypervisor. [ 742.573488] env[61974]: DEBUG oslo.service.loopingcall [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.573700] env[61974]: DEBUG nova.compute.manager [-] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.573794] env[61974]: DEBUG nova.network.neutron [-] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.579458] env[61974]: DEBUG nova.network.neutron [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.589756] env[61974]: DEBUG nova.network.neutron [-] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.995876] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.057741] env[61974]: DEBUG nova.network.neutron [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.081560] env[61974]: INFO nova.compute.manager [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] [instance: ccc4d6d9-979a-468a-9b7a-4633662c4052] Took 1.02 seconds to deallocate network for instance. 
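The inventory dict logged above for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a is what the scheduler report client compares against. A short illustrative calculation using the exact values from the log; the formula (total - reserved) * allocation_ratio is the usual Placement capacity rule, stated here as an assumption about how these numbers are consumed:

    # Illustrative only: effective capacity from the inventory reported above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0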
[ 743.091705] env[61974]: DEBUG nova.network.neutron [-] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.108807] env[61974]: DEBUG nova.network.neutron [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.501018] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.501641] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.505169] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.782s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.594094] env[61974]: INFO nova.compute.manager [-] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Took 1.02 seconds to deallocate network for instance. 
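The paired Acquiring lock "compute_resources" / acquired ... waited / released ... held lines that bracket every claim and abort in this log come from oslo.concurrency's synchronized wrapper (the inner function cited at lockutils.py:402-421). A hedged sketch of that pattern, with an illustrative function name rather than Nova's:

    # Sketch of the oslo.concurrency locking pattern that produces the
    # Acquiring/acquired/released lines in this log. claim_resources is an
    # illustrative name, not a Nova function.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the "compute_resources" semaphore held, so concurrent
        # instance_claim and abort_instance_claim calls serialize, and the
        # wrapper logs how long each caller waited for and held the lock.
        pass

    claim_resources()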
[ 743.596148] env[61974]: DEBUG nova.compute.claims [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 743.596332] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.611990] env[61974]: DEBUG oslo_concurrency.lockutils [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] Releasing lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.612307] env[61974]: DEBUG nova.compute.manager [req-9d7f2fa9-f932-4216-b0f2-5661b8f923cf req-0f972274-0159-49dc-90a8-9e5af2a1c40d service nova] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Received event network-vif-deleted-b46d1d24-64da-4fe2-bd3e-ed3715b818e7 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 744.006209] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.007662] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 744.007831] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.059634] env[61974]: DEBUG nova.policy [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d87e15fc4994341aa489612dd6b614c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbf3c92cc6e40c1a7e2617f63ffbecc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.106356] env[61974]: INFO nova.scheduler.client.report [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Deleted allocations for instance ccc4d6d9-979a-468a-9b7a-4633662c4052 [ 744.377099] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Successfully created port: fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.413668] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3073dac-b5e9-45fe-baf8-5d688c0a7e58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.422723] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249f2a11-3701-469b-85cb-921e2fc523d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.455697] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357fb692-c2e5-41ec-8baf-8e407ea7fc0a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.463344] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef3c446-93af-413e-b13d-af7ca5a357c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.477141] env[61974]: DEBUG nova.compute.provider_tree [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.510613] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 
99a03824-dd33-4916-84f7-4c911a98c9d1] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 744.620511] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32af34f3-2050-4aba-86db-7a138dc8844b tempest-MigrationsAdminTest-1127370048 tempest-MigrationsAdminTest-1127370048-project-member] Lock "ccc4d6d9-979a-468a-9b7a-4633662c4052" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.557s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.980660] env[61974]: DEBUG nova.scheduler.client.report [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.122824] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 745.230189] env[61974]: DEBUG nova.compute.manager [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Received event network-changed-fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.230315] env[61974]: DEBUG nova.compute.manager [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Refreshing instance network info cache due to event network-changed-fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 745.230477] env[61974]: DEBUG oslo_concurrency.lockutils [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] Acquiring lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.230619] env[61974]: DEBUG oslo_concurrency.lockutils [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] Acquired lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.230872] env[61974]: DEBUG nova.network.neutron [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Refreshing network info cache for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.463592] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 745.463592] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.463592] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.463592] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.463592] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.463592] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.463592] env[61974]: ERROR nova.compute.manager raise self.value [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.463592] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 745.463592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.463592] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 745.464087] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.464087] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 745.464087] env[61974]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 745.464087] env[61974]: ERROR nova.compute.manager [ 745.464087] env[61974]: Traceback (most recent call last): [ 745.464087] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 745.464087] env[61974]: listener.cb(fileno) [ 745.464087] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.464087] env[61974]: result = function(*args, **kwargs) [ 745.464087] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.464087] env[61974]: return func(*args, **kwargs) [ 745.464087] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.464087] env[61974]: raise e [ 745.464087] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.464087] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 745.464087] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.464087] env[61974]: created_port_ids = self._update_ports_for_instance( [ 745.464087] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.464087] env[61974]: with excutils.save_and_reraise_exception(): [ 745.464087] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.464087] env[61974]: self.force_reraise() [ 745.464087] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.464087] env[61974]: raise self.value [ 745.464087] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.464087] env[61974]: updated_port = self._update_port( [ 745.464087] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.464087] env[61974]: _ensure_no_port_binding_failure(port) [ 745.464087] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.464087] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 745.465053] env[61974]: nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 745.465053] env[61974]: Removing descriptor: 21 [ 745.494022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.987s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.494022] env[61974]: ERROR nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. 
[ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Traceback (most recent call last): [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.driver.spawn(context, instance, image_meta, [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.494022] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] vm_ref = self.build_virtual_machine(instance, [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] for vif in network_info: [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self._sync_wrapper(fn, *args, **kwargs) [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.wait() [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self[:] = self._gt.wait() [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self._exit_event.wait() [ 745.494341] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] result = hub.switch() [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return self.greenlet.switch() [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] result = function(*args, **kwargs) [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] return func(*args, **kwargs) [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise e [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] nwinfo = self.network_api.allocate_for_instance( [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.494664] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] created_port_ids = self._update_ports_for_instance( [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] with excutils.save_and_reraise_exception(): [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] self.force_reraise() [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise self.value [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] updated_port = self._update_port( [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] _ensure_no_port_binding_failure(port) [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 745.494973] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] raise exception.PortBindingFailed(port_id=port['id']) [ 745.495271] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] nova.exception.PortBindingFailed: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. [ 745.495271] env[61974]: ERROR nova.compute.manager [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] [ 745.495271] env[61974]: DEBUG nova.compute.utils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 745.496297] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.621s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.496676] env[61974]: DEBUG nova.objects.instance [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 745.501625] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Build of instance c3eb4869-0bde-4398-bf34-3ee6073174e5 was re-scheduled: Binding failed for port 24fcf52e-4a33-4aaf-b26b-f2df48d0f409, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 745.502102] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 745.502343] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.502489] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.502666] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.520213] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.552731] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.552980] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.553179] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.553377] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.553556] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.554516] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.554516] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.554516] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 745.554516] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.554516] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.554738] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.556038] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d92dd76-5553-4603-96e3-5602eec386f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.564552] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8eac1f-5780-4fe2-ad92-9f9b22eabe5f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.581022] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. 
[ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Traceback (most recent call last): [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] yield resources [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.driver.spawn(context, instance, image_meta, [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] vm_ref = self.build_virtual_machine(instance, [ 745.581022] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] for vif in network_info: [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return self._sync_wrapper(fn, *args, **kwargs) [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.wait() [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self[:] = self._gt.wait() [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return self._exit_event.wait() [ 745.581451] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 745.581451] env[61974]: ERROR 
nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] current.throw(*self._exc) [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] result = function(*args, **kwargs) [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return func(*args, **kwargs) [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise e [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] nwinfo = self.network_api.allocate_for_instance( [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] created_port_ids = self._update_ports_for_instance( [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] with excutils.save_and_reraise_exception(): [ 745.581820] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.force_reraise() [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise self.value [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] updated_port = self._update_port( [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] _ensure_no_port_binding_failure(port) [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise exception.PortBindingFailed(port_id=port['id']) [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 745.582217] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] [ 745.582217] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Terminating instance [ 745.583460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.644962] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.748575] env[61974]: DEBUG nova.network.neutron [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.849092] env[61974]: DEBUG nova.network.neutron [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.024920] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.115759] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.352088] env[61974]: DEBUG oslo_concurrency.lockutils [req-95a014c0-9aef-43d9-8706-7e6bca40bbc2 req-80d709ee-4458-4788-a238-94c7c55f3e19 service nova] Releasing lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.352596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.352783] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.510049] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d38c07d1-0fc1-4e96-858b-1f182cf867fd tempest-ServersAdmin275Test-388990723 tempest-ServersAdmin275Test-388990723-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.510682] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.968s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.512191] env[61974]: INFO nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.618179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-c3eb4869-0bde-4398-bf34-3ee6073174e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.618179] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 746.618179] env[61974]: DEBUG nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 746.618548] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.642534] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.874811] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.027492] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.145310] env[61974]: DEBUG nova.network.neutron [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.253800] env[61974]: DEBUG nova.compute.manager [req-80ffee72-6aae-4b9a-94d2-09a4e1b31228 req-69d2dd52-9f0e-4d95-a3e7-897e3260a796 service nova] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Received event network-vif-deleted-fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.531247] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.532553] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 747.533182] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.534053] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5ac6667-5f0c-4e5c-aa56-f3cce5c3b150 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.552256] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ae7da3-afcf-43b1-832b-ac3cfc98856f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.587360] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 99a03824-dd33-4916-84f7-4c911a98c9d1 could not be found. [ 747.587649] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.587875] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 747.588260] env[61974]: DEBUG oslo.service.loopingcall [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.588618] env[61974]: DEBUG nova.compute.manager [-] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 747.588702] env[61974]: DEBUG nova.network.neutron [-] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.604617] env[61974]: DEBUG nova.network.neutron [-] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.648359] env[61974]: INFO nova.compute.manager [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c3eb4869-0bde-4398-bf34-3ee6073174e5] Took 1.03 seconds to deallocate network for instance. 
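Every PortBindingFailed traceback earlier in this section bottoms out at nova/network/neutron.py line 294, where _ensure_no_port_binding_failure() raises nova.exception.PortBindingFailed for the port Neutron could not bind (ports fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d and 24fcf52e-4a33-4aaf-b26b-f2df48d0f409 above), after which the compute manager aborts the claim and either re-schedules the build or destroys the instance, as the surrounding records show. The snippet below is a minimal, illustrative sketch of that final check, not a quote of the deployed Nova tree: the 'binding:vif_type' key and the 'binding_failed' value are assumed from Neutron's port-binding extension, and the real exception class lives in nova.exception.

# Minimal sketch (assumed shape, not the exact Nova source): after updating a
# port, Nova inspects the binding Neutron returned; a vif_type of
# "binding_failed" means no mechanism driver bound the port on this host, so
# the spawn is aborted and the instance is re-scheduled or cleaned up.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # 'binding:vif_type' is filled in by Neutron's port-binding extension.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# A port shaped like the ones failing in this log:
port = {'id': 'fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port fc801ec7-..., please check neutron logs for more information.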
[ 748.017637] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64670b0-9484-49c1-8eb0-329156cdac80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.027326] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bbf458-a35b-4b0f-a8f3-d980f793615a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.059969] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456eb34b-ba29-48cf-83e9-db609ef51a7f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.068548] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310b2051-e2ab-4a89-85de-81aa64ff0704 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.083428] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.109503] env[61974]: DEBUG nova.network.neutron [-] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.586829] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.610342] env[61974]: INFO nova.compute.manager [-] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Took 1.02 seconds to deallocate network for instance. 
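For context on the "Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a" record above: the inventory dict reported to Placement gives, per resource class, the total, reserved amount and allocation_ratio, and usable capacity follows the usual Placement convention of (total - reserved) * allocation_ratio. On those figures this node (domain-c8) offers roughly 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk. A small worked sketch, with the formula assumed from Placement's documented behaviour rather than quoted from this deployment:

# Derive schedulable capacity from the inventory data logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192, MEMORY_MB 196078, DISK_GB 400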
[ 748.612774] env[61974]: DEBUG nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 748.612959] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.683015] env[61974]: INFO nova.scheduler.client.report [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance c3eb4869-0bde-4398-bf34-3ee6073174e5 [ 749.093098] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.093606] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 749.097197] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.104s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.097197] env[61974]: DEBUG nova.objects.instance [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lazy-loading 'resources' on Instance uuid 93a736b5-5423-4378-8b0c-73a0c46414ca {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.194677] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ebb04bbe-6f52-4f1c-bdb6-622295a838a3 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c3eb4869-0bde-4398-bf34-3ee6073174e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.736s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.601098] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.605536] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 749.605668] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.683301] env[61974]: DEBUG nova.policy [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d87e15fc4994341aa489612dd6b614c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbf3c92cc6e40c1a7e2617f63ffbecc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 749.698375] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 750.031751] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df98f2-5544-4922-b54b-a2ddc5dd6610 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.046154] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ba5521-c239-481a-b871-ecdcdbbd17c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.081883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e44e20a-4fb0-46e8-bd02-ddd695677539 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.089214] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f5afd5-9e84-4904-9ec2-5ab7cf6cb40b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.106430] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 750.109833] env[61974]: DEBUG nova.compute.provider_tree [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.129224] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Successfully created port: b0a4e268-72a6-410a-80c5-90ef7c6ea789 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.218505] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.616177] env[61974]: DEBUG nova.scheduler.client.report [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.121300] env[61974]: DEBUG 
nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 751.124307] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.027s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.126191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.140s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.150432] env[61974]: INFO nova.scheduler.client.report [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Deleted allocations for instance 93a736b5-5423-4378-8b0c-73a0c46414ca [ 751.162385] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.162626] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.162779] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.162953] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.163118] 
env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.163260] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.163459] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.163608] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.163765] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.163921] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.164709] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.165707] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd500b8-fb5a-457e-bdf5-986c521e9fa5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.176255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b265b8b-c350-4871-acf9-1a0589d01239 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.313162] env[61974]: DEBUG nova.compute.manager [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Received event network-changed-b0a4e268-72a6-410a-80c5-90ef7c6ea789 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.313162] env[61974]: DEBUG nova.compute.manager [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] [instance: 
3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Refreshing instance network info cache due to event network-changed-b0a4e268-72a6-410a-80c5-90ef7c6ea789. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 751.313162] env[61974]: DEBUG oslo_concurrency.lockutils [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] Acquiring lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.313162] env[61974]: DEBUG oslo_concurrency.lockutils [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] Acquired lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.313162] env[61974]: DEBUG nova.network.neutron [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Refreshing network info cache for port b0a4e268-72a6-410a-80c5-90ef7c6ea789 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.500092] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. [ 751.500092] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 751.500092] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 751.500092] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 751.500092] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.500092] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.500092] env[61974]: ERROR nova.compute.manager raise self.value [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 751.500092] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 751.500092] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.500092] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 751.500521] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.500521] env[61974]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 751.500521] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. [ 751.500521] env[61974]: ERROR nova.compute.manager [ 751.500521] env[61974]: Traceback (most recent call last): [ 751.500521] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 751.500521] env[61974]: listener.cb(fileno) [ 751.500521] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 751.500521] env[61974]: result = function(*args, **kwargs) [ 751.500521] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 751.500521] env[61974]: return func(*args, **kwargs) [ 751.500521] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 751.500521] env[61974]: raise e [ 751.500521] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 751.500521] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 751.500521] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 751.500521] env[61974]: created_port_ids = self._update_ports_for_instance( [ 751.500521] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 751.500521] env[61974]: with excutils.save_and_reraise_exception(): [ 751.500521] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.500521] env[61974]: self.force_reraise() [ 751.500521] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.500521] env[61974]: raise self.value [ 751.500521] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 751.500521] env[61974]: updated_port = self._update_port( [ 751.500521] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.500521] env[61974]: _ensure_no_port_binding_failure(port) [ 751.500521] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.500521] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 751.501256] env[61974]: nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. [ 751.501256] env[61974]: Removing descriptor: 21 [ 751.501256] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. 
[ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Traceback (most recent call last): [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] yield resources [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.driver.spawn(context, instance, image_meta, [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 751.501256] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] vm_ref = self.build_virtual_machine(instance, [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] vif_infos = vmwarevif.get_vif_info(self._session, [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] for vif in network_info: [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self._sync_wrapper(fn, *args, **kwargs) [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.wait() [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self[:] = self._gt.wait() [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self._exit_event.wait() [ 751.501580] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 751.501896] env[61974]: ERROR 
nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] result = hub.switch() [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self.greenlet.switch() [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] result = function(*args, **kwargs) [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return func(*args, **kwargs) [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise e [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] nwinfo = self.network_api.allocate_for_instance( [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 751.501896] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] created_port_ids = self._update_ports_for_instance( [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] with excutils.save_and_reraise_exception(): [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.force_reraise() [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise self.value [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] updated_port = self._update_port( [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.502304] 
env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] _ensure_no_port_binding_failure(port) [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.502304] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise exception.PortBindingFailed(port_id=port['id']) [ 751.502603] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. [ 751.502603] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] [ 751.502603] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Terminating instance [ 751.504154] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.664068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c2f47c13-3f2e-49a8-8ffc-9b6b4e73aadc tempest-ServersAdmin275Test-1579765304 tempest-ServersAdmin275Test-1579765304-project-member] Lock "93a736b5-5423-4378-8b0c-73a0c46414ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.429s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.844732] env[61974]: DEBUG nova.network.neutron [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.029154] env[61974]: DEBUG nova.network.neutron [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.047043] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433e9453-fe00-4791-8e20-a10510eb661e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.055458] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55a9fd7-477f-4d19-b039-c23bb4fec78d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.089525] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6029c75d-1f03-4273-8832-84ee536c4222 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.098754] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3daf30c-951e-4207-90f9-cc770138df3f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.115037] env[61974]: DEBUG nova.compute.provider_tree [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.273118] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "db03b815-295a-4a66-9afd-a1f4ba97601f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.273387] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "db03b815-295a-4a66-9afd-a1f4ba97601f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.537296] env[61974]: DEBUG oslo_concurrency.lockutils [req-28e8b457-99d2-4a71-81ae-05bcc6a73da3 req-b40430fd-5013-43a9-bf67-3a6a9ce675e3 service nova] Releasing lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.538376] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
752.539802] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.618412] env[61974]: DEBUG nova.scheduler.client.report [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.062846] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.127088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.997s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.127088] env[61974]: ERROR nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. 
[ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Traceback (most recent call last): [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.driver.spawn(context, instance, image_meta, [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 753.127088] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] vm_ref = self.build_virtual_machine(instance, [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] vif_infos = vmwarevif.get_vif_info(self._session, [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] for vif in network_info: [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return self._sync_wrapper(fn, *args, **kwargs) [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.wait() [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self[:] = self._gt.wait() [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return self._exit_event.wait() [ 753.127449] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] current.throw(*self._exc) [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] result = function(*args, **kwargs) [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] return func(*args, **kwargs) [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise e [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] nwinfo = self.network_api.allocate_for_instance( [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] created_port_ids = self._update_ports_for_instance( [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 753.127823] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] with excutils.save_and_reraise_exception(): [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] self.force_reraise() [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise self.value [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] updated_port = self._update_port( [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] _ensure_no_port_binding_failure(port) [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] raise exception.PortBindingFailed(port_id=port['id']) [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] nova.exception.PortBindingFailed: Binding failed for 
port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. [ 753.128200] env[61974]: ERROR nova.compute.manager [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] [ 753.128558] env[61974]: DEBUG nova.compute.utils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 753.128558] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Build of instance 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c was re-scheduled: Binding failed for port e1fe7370-4293-451f-9204-d8efd7c9927b, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 753.128944] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 753.129192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquiring lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.129356] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Acquired lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.129523] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.130611] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.665s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.132762] env[61974]: INFO nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.196337] env[61974]: DEBUG 
nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.358489] env[61974]: DEBUG nova.compute.manager [req-90541978-0a46-475d-b7ba-f9781a84c0c1 req-94e705b3-d3b4-42ff-ba00-70fba01a8689 service nova] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Received event network-vif-deleted-b0a4e268-72a6-410a-80c5-90ef7c6ea789 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 753.660775] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.698979] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.698979] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 753.699193] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.699517] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39d3962d-0e05-4846-aa8c-15ccc072291e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.713517] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1851d6f-53d5-4e3d-bd2a-396cd0858ad2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.740659] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40 could not be found. 
[ 753.740883] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.741071] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Took 0.04 seconds to destroy the instance on the hypervisor. [ 753.741319] env[61974]: DEBUG oslo.service.loopingcall [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.741549] env[61974]: DEBUG nova.compute.manager [-] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 753.741644] env[61974]: DEBUG nova.network.neutron [-] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.771749] env[61974]: DEBUG nova.network.neutron [-] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.802570] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.274662] env[61974]: DEBUG nova.network.neutron [-] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.305059] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Releasing lock "refresh_cache-6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.305300] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 754.306448] env[61974]: DEBUG nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.306448] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 754.340688] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.583802] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf424d1-acea-4530-9ce0-d9fc66fddf55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.595230] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3bb4df-9cde-4804-90f3-966acd115342 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.629206] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77f8f8e-d97d-47a8-bdd0-51889c670f65 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.637052] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbfcca0-7502-405c-a54e-71cb43a08c63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.660957] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.776858] env[61974]: INFO nova.compute.manager [-] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Took 1.04 seconds to deallocate network for instance. 
[ 754.779975] env[61974]: DEBUG nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 754.780196] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.849518] env[61974]: DEBUG nova.network.neutron [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.167809] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.355235] env[61974]: INFO nova.compute.manager [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] [instance: 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c] Took 1.05 seconds to deallocate network for instance. [ 755.673187] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.673966] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 755.676659] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.158s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.678228] env[61974]: INFO nova.compute.claims [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.182911] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.190963] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 756.190963] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.397328] env[61974]: INFO nova.scheduler.client.report [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Deleted allocations for instance 6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c [ 756.431788] env[61974]: DEBUG nova.policy [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d87e15fc4994341aa489612dd6b614c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2bbf3c92cc6e40c1a7e2617f63ffbecc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 756.701613] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 756.908320] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Successfully created port: 617e8233-03fc-4fc1-8e63-0ea60fa021ee {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.910328] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1df0a26a-4825-49b9-b1c3-2b2549bb7459 tempest-FloatingIPsAssociationTestJSON-889181218 tempest-FloatingIPsAssociationTestJSON-889181218-project-member] Lock "6bacb8ee-e3a7-49f4-85a5-a86f5d984a6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.699s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.111832] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1379e3f2-f086-47f9-aaaf-7cb938343d1c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.121910] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a25c44-38a4-4de9-ac6d-cf46c583d9db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.154138] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae20d6da-3f1b-43b3-9ebe-16e2488f4afb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.162306] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4067dc3a-506a-4652-916b-c56e6a316aa3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.178347] env[61974]: DEBUG nova.compute.provider_tree [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.417768] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 757.682960] env[61974]: DEBUG nova.scheduler.client.report [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.710011] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 757.741647] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 757.741896] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.742108] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 757.742303] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.742451] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 757.742598] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 757.742803] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 757.742959] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 757.743140] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 757.743306] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 757.743519] env[61974]: DEBUG nova.virt.hardware [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 757.744390] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bb1089-dc75-4485-9cdb-f2391830991a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.752612] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9da7ffe-db43-43b9-9b89-18504c5f4548 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.943837] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.194256] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.194256] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 758.198032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.735s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.198032] env[61974]: INFO nova.compute.claims [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.497765] env[61974]: DEBUG nova.compute.manager [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Received event network-changed-617e8233-03fc-4fc1-8e63-0ea60fa021ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 758.497972] env[61974]: DEBUG nova.compute.manager [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Refreshing instance network info cache due to event network-changed-617e8233-03fc-4fc1-8e63-0ea60fa021ee. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 758.498211] env[61974]: DEBUG oslo_concurrency.lockutils [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] Acquiring lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.498355] env[61974]: DEBUG oslo_concurrency.lockutils [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] Acquired lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.498517] env[61974]: DEBUG nova.network.neutron [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Refreshing network info cache for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.702110] env[61974]: DEBUG nova.compute.utils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.710805] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 758.711066] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.765317] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. 
[ 758.765317] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.765317] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.765317] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.765317] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.765317] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.765317] env[61974]: ERROR nova.compute.manager raise self.value [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.765317] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.765317] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.765317] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.765792] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.765792] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.765792] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. 
[ 758.765792] env[61974]: ERROR nova.compute.manager [ 758.765792] env[61974]: Traceback (most recent call last): [ 758.765792] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.765792] env[61974]: listener.cb(fileno) [ 758.765792] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.765792] env[61974]: result = function(*args, **kwargs) [ 758.765792] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.765792] env[61974]: return func(*args, **kwargs) [ 758.765792] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.765792] env[61974]: raise e [ 758.765792] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.765792] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 758.765792] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.765792] env[61974]: created_port_ids = self._update_ports_for_instance( [ 758.765792] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.765792] env[61974]: with excutils.save_and_reraise_exception(): [ 758.765792] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.765792] env[61974]: self.force_reraise() [ 758.765792] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.765792] env[61974]: raise self.value [ 758.765792] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.765792] env[61974]: updated_port = self._update_port( [ 758.765792] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.765792] env[61974]: _ensure_no_port_binding_failure(port) [ 758.765792] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.765792] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.766912] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. [ 758.766912] env[61974]: Removing descriptor: 21 [ 758.766912] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. 
[ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] Traceback (most recent call last): [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] yield resources [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.driver.spawn(context, instance, image_meta, [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.766912] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] vm_ref = self.build_virtual_machine(instance, [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] for vif in network_info: [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self._sync_wrapper(fn, *args, **kwargs) [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.wait() [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self[:] = self._gt.wait() [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self._exit_event.wait() [ 758.767277] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.767604] env[61974]: ERROR 
nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] result = hub.switch() [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self.greenlet.switch() [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] result = function(*args, **kwargs) [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return func(*args, **kwargs) [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise e [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] nwinfo = self.network_api.allocate_for_instance( [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 758.767604] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] created_port_ids = self._update_ports_for_instance( [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] with excutils.save_and_reraise_exception(): [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.force_reraise() [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise self.value [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] updated_port = self._update_port( [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.767922] 
env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] _ensure_no_port_binding_failure(port) [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.767922] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise exception.PortBindingFailed(port_id=port['id']) [ 758.768264] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. [ 758.768264] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] [ 758.768264] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Terminating instance [ 758.770709] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.787875] env[61974]: DEBUG nova.policy [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '127d1865b3c747948f8e04a1b0292ebc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5139ae9022fc4c1dbf6cf936dee77b6d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 759.027057] env[61974]: DEBUG nova.network.neutron [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 759.218105] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 759.423749] env[61974]: DEBUG nova.network.neutron [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.485233] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Successfully created port: 105582ad-592c-4a57-9b75-d5a823ec809a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.519516] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquiring lock "5013beda-7f34-44fe-9159-f04e0aca5bce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.519768] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "5013beda-7f34-44fe-9159-f04e0aca5bce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.683824] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79d45ff-50aa-49d4-ba52-d2227fe89c7e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.691760] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200a7b47-dedd-4e5d-a51d-ec6fffaef4c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.731900] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4844f76d-2c3f-4973-95bc-7da67f5c87ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.740676] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0f48f1-7ae6-432e-a476-a864a5d57106 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.756853] env[61974]: DEBUG nova.compute.provider_tree [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.928177] env[61974]: DEBUG oslo_concurrency.lockutils [req-bf0b6f02-19f5-4cf3-8fe1-5c81aee6cf22 req-2d592e20-4f2c-4472-af8f-2963173cb705 service nova] Releasing lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.928177] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.928177] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.233134] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 760.259753] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.260020] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.260178] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.260366] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.260532] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
760.260649] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.260854] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.261014] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.261212] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.261411] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.261593] env[61974]: DEBUG nova.virt.hardware [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.262444] env[61974]: DEBUG nova.scheduler.client.report [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.266101] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a072f8-0575-4dfb-a3a7-80a06c860706 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.274564] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a25fcac-3d08-4e69-8695-16f18c455412 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.453931] env[61974]: DEBUG nova.network.neutron [None 
req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.528668] env[61974]: DEBUG nova.compute.manager [req-4a0b950e-eafd-4d90-9d31-b8e0197c1dfb req-615036de-c829-4e8b-8d7a-fd007c6aee4e service nova] [instance: a333f129-6a86-4715-83e2-79543620d013] Received event network-vif-deleted-617e8233-03fc-4fc1-8e63-0ea60fa021ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 760.584311] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.772022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.772022] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 760.775563] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.598s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.013079] env[61974]: ERROR nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. 
[ 761.013079] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.013079] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.013079] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.013079] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.013079] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.013079] env[61974]: ERROR nova.compute.manager raise self.value [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.013079] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 761.013079] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.013079] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 761.013694] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.013694] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 761.013694] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. 
[ 761.013694] env[61974]: ERROR nova.compute.manager [ 761.013694] env[61974]: Traceback (most recent call last): [ 761.013694] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 761.013694] env[61974]: listener.cb(fileno) [ 761.013694] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.013694] env[61974]: result = function(*args, **kwargs) [ 761.013694] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 761.013694] env[61974]: return func(*args, **kwargs) [ 761.013694] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.013694] env[61974]: raise e [ 761.013694] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.013694] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 761.013694] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.013694] env[61974]: created_port_ids = self._update_ports_for_instance( [ 761.013694] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.013694] env[61974]: with excutils.save_and_reraise_exception(): [ 761.013694] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.013694] env[61974]: self.force_reraise() [ 761.013694] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.013694] env[61974]: raise self.value [ 761.013694] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.013694] env[61974]: updated_port = self._update_port( [ 761.013694] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.013694] env[61974]: _ensure_no_port_binding_failure(port) [ 761.013694] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.013694] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 761.014448] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. [ 761.014448] env[61974]: Removing descriptor: 20 [ 761.014448] env[61974]: ERROR nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. 
[ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Traceback (most recent call last): [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] yield resources [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.driver.spawn(context, instance, image_meta, [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.014448] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] vm_ref = self.build_virtual_machine(instance, [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] for vif in network_info: [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self._sync_wrapper(fn, *args, **kwargs) [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.wait() [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self[:] = self._gt.wait() [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self._exit_event.wait() [ 761.014780] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 761.015126] env[61974]: ERROR 
nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] result = hub.switch() [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self.greenlet.switch() [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] result = function(*args, **kwargs) [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return func(*args, **kwargs) [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise e [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] nwinfo = self.network_api.allocate_for_instance( [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.015126] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] created_port_ids = self._update_ports_for_instance( [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] with excutils.save_and_reraise_exception(): [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.force_reraise() [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise self.value [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] updated_port = self._update_port( [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.015456] 
env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] _ensure_no_port_binding_failure(port) [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.015456] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise exception.PortBindingFailed(port_id=port['id']) [ 761.015754] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. [ 761.015754] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] [ 761.015754] env[61974]: INFO nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Terminating instance [ 761.016997] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquiring lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.017119] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquired lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.017312] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.088362] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.088726] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 761.088973] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.089217] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64f6e9cf-c146-47f6-af16-25c49a2767b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.103178] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd7df6d-68d5-4847-86e5-6b79e44bb6fb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.128796] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a333f129-6a86-4715-83e2-79543620d013 could not be found. [ 761.128899] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.128988] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Took 0.04 seconds to destroy the instance on the hypervisor. [ 761.129294] env[61974]: DEBUG oslo.service.loopingcall [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 761.129550] env[61974]: DEBUG nova.compute.manager [-] [instance: a333f129-6a86-4715-83e2-79543620d013] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 761.129645] env[61974]: DEBUG nova.network.neutron [-] [instance: a333f129-6a86-4715-83e2-79543620d013] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.144341] env[61974]: DEBUG nova.network.neutron [-] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.280770] env[61974]: DEBUG nova.compute.utils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.287290] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.287290] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.326411] env[61974]: DEBUG nova.policy [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64792508e734416da7a4aa838c18a541', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfa9d214177b47b28ad4f684f6f82fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 761.540875] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.646764] env[61974]: DEBUG nova.network.neutron [-] [instance: a333f129-6a86-4715-83e2-79543620d013] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.683795] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.736771] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8831ee-f65d-49a1-b62a-55813ab56a62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.749535] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332418ff-27b6-4c5d-b24c-af5dc8d2b7e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.753679] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Successfully created port: 5d40974a-fcbf-49fd-a14a-b03902417c3a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.786302] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 761.789389] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708a0c2e-b462-401a-a11a-f40c0328e000 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.801430] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91079d5-8804-4d25-aa8e-8b6ac28d2e1f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.818087] env[61974]: DEBUG nova.compute.provider_tree [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.150071] env[61974]: INFO nova.compute.manager [-] [instance: a333f129-6a86-4715-83e2-79543620d013] Took 1.02 seconds to deallocate network for instance. 
[ 762.153884] env[61974]: DEBUG nova.compute.claims [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 762.154091] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.189958] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Releasing lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.190475] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 762.190679] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.190967] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4947ea05-0033-4bfc-97f3-e6fa018dd16f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.200063] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2b43d9-a3a9-44cb-af87-f416b4e88af5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.222560] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a465c7e-874d-4cd1-9c23-0ae249997114 could not be found. [ 762.222560] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.222560] env[61974]: INFO nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 762.222560] env[61974]: DEBUG oslo.service.loopingcall [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.222749] env[61974]: DEBUG nova.compute.manager [-] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.222842] env[61974]: DEBUG nova.network.neutron [-] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 762.247714] env[61974]: DEBUG nova.network.neutron [-] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.323937] env[61974]: DEBUG nova.scheduler.client.report [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 762.594236] env[61974]: DEBUG nova.compute.manager [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Received event network-changed-105582ad-592c-4a57-9b75-d5a823ec809a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 762.594435] env[61974]: DEBUG nova.compute.manager [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Refreshing instance network info cache due to event network-changed-105582ad-592c-4a57-9b75-d5a823ec809a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 762.594647] env[61974]: DEBUG oslo_concurrency.lockutils [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] Acquiring lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.594789] env[61974]: DEBUG oslo_concurrency.lockutils [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] Acquired lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.594953] env[61974]: DEBUG nova.network.neutron [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Refreshing network info cache for port 105582ad-592c-4a57-9b75-d5a823ec809a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.752753] env[61974]: DEBUG nova.network.neutron [-] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.800495] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 762.828310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.053s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.829166] env[61974]: ERROR nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. 
[ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Traceback (most recent call last): [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.driver.spawn(context, instance, image_meta, [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] vm_ref = self.build_virtual_machine(instance, [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] vif_infos = vmwarevif.get_vif_info(self._session, [ 762.829166] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] for vif in network_info: [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self._sync_wrapper(fn, *args, **kwargs) [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.wait() [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self[:] = self._gt.wait() [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self._exit_event.wait() [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] result = hub.switch() [ 762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
762.829500] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return self.greenlet.switch() [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] result = function(*args, **kwargs) [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] return func(*args, **kwargs) [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise e [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] nwinfo = self.network_api.allocate_for_instance( [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] created_port_ids = self._update_ports_for_instance( [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] with excutils.save_and_reraise_exception(): [ 762.829912] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] self.force_reraise() [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise self.value [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] updated_port = self._update_port( [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] _ensure_no_port_binding_failure(port) [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] raise exception.PortBindingFailed(port_id=port['id']) [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] nova.exception.PortBindingFailed: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. [ 762.830248] env[61974]: ERROR nova.compute.manager [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] [ 762.830639] env[61974]: DEBUG nova.compute.utils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 762.833562] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 762.833782] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.833938] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 762.834164] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.834320] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 762.835645] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 762.835645] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 762.835645] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 762.835645] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 762.835645] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 762.836073] env[61974]: DEBUG nova.virt.hardware [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 762.836073] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.239s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.839665] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7cd7c6-bb4c-43ce-b243-5e9131fcf22e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.842975] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Build of instance 9f781418-6149-4c73-aaa0-20c8cbc8c482 was re-scheduled: Binding failed for port b352c901-672c-42ad-a6f4-9e0dd6677a7d, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 762.843643] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 762.843883] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquiring lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.844047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Acquired lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.844359] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.852834] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099bb697-e0c0-430f-8322-b4fd18646dcb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.081954] env[61974]: ERROR nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. 
[ 763.081954] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.081954] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.081954] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.081954] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.081954] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.081954] env[61974]: ERROR nova.compute.manager raise self.value [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.081954] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.081954] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.081954] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.083402] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.083402] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.083402] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. 
[ 763.083402] env[61974]: ERROR nova.compute.manager [ 763.083402] env[61974]: Traceback (most recent call last): [ 763.083402] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.083402] env[61974]: listener.cb(fileno) [ 763.083402] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.083402] env[61974]: result = function(*args, **kwargs) [ 763.083402] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.083402] env[61974]: return func(*args, **kwargs) [ 763.083402] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.083402] env[61974]: raise e [ 763.083402] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.083402] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 763.083402] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.083402] env[61974]: created_port_ids = self._update_ports_for_instance( [ 763.083402] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.083402] env[61974]: with excutils.save_and_reraise_exception(): [ 763.083402] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.083402] env[61974]: self.force_reraise() [ 763.083402] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.083402] env[61974]: raise self.value [ 763.083402] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.083402] env[61974]: updated_port = self._update_port( [ 763.083402] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.083402] env[61974]: _ensure_no_port_binding_failure(port) [ 763.083402] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.083402] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.084281] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. [ 763.084281] env[61974]: Removing descriptor: 20 [ 763.084281] env[61974]: ERROR nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. 
[ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Traceback (most recent call last): [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] yield resources [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.driver.spawn(context, instance, image_meta, [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.084281] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] vm_ref = self.build_virtual_machine(instance, [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] for vif in network_info: [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self._sync_wrapper(fn, *args, **kwargs) [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.wait() [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self[:] = self._gt.wait() [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self._exit_event.wait() [ 763.084748] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.085113] env[61974]: ERROR 
nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] result = hub.switch() [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self.greenlet.switch() [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] result = function(*args, **kwargs) [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return func(*args, **kwargs) [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise e [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] nwinfo = self.network_api.allocate_for_instance( [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.085113] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] created_port_ids = self._update_ports_for_instance( [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] with excutils.save_and_reraise_exception(): [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.force_reraise() [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise self.value [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] updated_port = self._update_port( [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.085512] 
env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] _ensure_no_port_binding_failure(port) [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.085512] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise exception.PortBindingFailed(port_id=port['id']) [ 763.085813] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. [ 763.085813] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] [ 763.085813] env[61974]: INFO nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Terminating instance [ 763.085813] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquiring lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.085813] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquired lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.085813] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.121298] env[61974]: DEBUG nova.network.neutron [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.224045] env[61974]: DEBUG nova.network.neutron [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.257364] env[61974]: INFO nova.compute.manager [-] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Took 1.03 seconds to deallocate network for instance. 
[ 763.260178] env[61974]: DEBUG nova.compute.claims [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 763.260178] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.367620] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.458692] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.609866] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.731558] env[61974]: DEBUG oslo_concurrency.lockutils [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] Releasing lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.731558] env[61974]: DEBUG nova.compute.manager [req-635cfe01-1708-4808-9544-79669737f8a8 req-a247f418-f9f8-4d47-b122-bab93ca29509 service nova] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Received event network-vif-deleted-105582ad-592c-4a57-9b75-d5a823ec809a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.795681] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84584fa-11f2-4e8f-8cf2-1166b73d25f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.801042] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.805888] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4685090-7042-439a-8582-c357ca95f2a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.838279] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1677aba-74ee-4e10-8ee7-7a9d49ff1593 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.847680] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513f0ee2-6982-48b2-bbc8-b0a141e95673 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.862844] env[61974]: DEBUG nova.compute.provider_tree [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.963212] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Releasing lock "refresh_cache-9f781418-6149-4c73-aaa0-20c8cbc8c482" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.963468] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 763.963653] env[61974]: DEBUG nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 763.963820] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.982327] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.303261] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Releasing lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.303698] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 764.304140] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.304217] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f310d375-5a5b-4f98-8503-e9239a2a4429 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.316231] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ab242f-902e-4bba-a1b4-8f0fa5f5dc60 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.345326] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e217cbc-4962-44c7-b054-b3ae135ef8bb could not be found. 
[ 764.345594] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.345779] env[61974]: INFO nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 764.346039] env[61974]: DEBUG oslo.service.loopingcall [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.346271] env[61974]: DEBUG nova.compute.manager [-] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 764.346364] env[61974]: DEBUG nova.network.neutron [-] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.363017] env[61974]: DEBUG nova.network.neutron [-] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.364783] env[61974]: DEBUG nova.scheduler.client.report [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 764.487589] env[61974]: DEBUG nova.network.neutron [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.686828] env[61974]: DEBUG nova.compute.manager [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Received event network-changed-5d40974a-fcbf-49fd-a14a-b03902417c3a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 764.690508] env[61974]: DEBUG nova.compute.manager [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Refreshing instance network info cache due to event 
network-changed-5d40974a-fcbf-49fd-a14a-b03902417c3a. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 764.690724] env[61974]: DEBUG oslo_concurrency.lockutils [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] Acquiring lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.690896] env[61974]: DEBUG oslo_concurrency.lockutils [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] Acquired lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.691122] env[61974]: DEBUG nova.network.neutron [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Refreshing network info cache for port 5d40974a-fcbf-49fd-a14a-b03902417c3a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.864664] env[61974]: DEBUG nova.network.neutron [-] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.874182] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.038s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.874652] env[61974]: ERROR nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. 
[ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Traceback (most recent call last): [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.driver.spawn(context, instance, image_meta, [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] vm_ref = self.build_virtual_machine(instance, [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] vif_infos = vmwarevif.get_vif_info(self._session, [ 764.874652] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] for vif in network_info: [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self._sync_wrapper(fn, *args, **kwargs) [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.wait() [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self[:] = self._gt.wait() [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self._exit_event.wait() [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] result = hub.switch() [ 764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
764.875025] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return self.greenlet.switch() [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] result = function(*args, **kwargs) [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] return func(*args, **kwargs) [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise e [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] nwinfo = self.network_api.allocate_for_instance( [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] created_port_ids = self._update_ports_for_instance( [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] with excutils.save_and_reraise_exception(): [ 764.875357] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] self.force_reraise() [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise self.value [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] updated_port = self._update_port( [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] _ensure_no_port_binding_failure(port) [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] raise exception.PortBindingFailed(port_id=port['id']) [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] nova.exception.PortBindingFailed: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. [ 764.875673] env[61974]: ERROR nova.compute.manager [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] [ 764.875983] env[61974]: DEBUG nova.compute.utils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.877744] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.233s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.879032] env[61974]: INFO nova.compute.claims [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.881767] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Build of instance 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff was re-scheduled: Binding failed for port b46d1d24-64da-4fe2-bd3e-ed3715b818e7, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 764.882276] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 764.882544] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquiring lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.882725] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Acquired lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.882947] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.991398] env[61974]: INFO nova.compute.manager [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] [instance: 9f781418-6149-4c73-aaa0-20c8cbc8c482] Took 1.03 seconds to deallocate network for instance. [ 765.210118] env[61974]: DEBUG nova.network.neutron [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.306983] env[61974]: DEBUG nova.network.neutron [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.366754] env[61974]: INFO nova.compute.manager [-] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Took 1.02 seconds to deallocate network for instance. 
[ 765.369290] env[61974]: DEBUG nova.compute.claims [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 765.369450] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.403717] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.458786] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.576829] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquiring lock "b62397bb-95b4-4d07-819a-bfcfd7c6a38e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.577185] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "b62397bb-95b4-4d07-819a-bfcfd7c6a38e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.809305] env[61974]: DEBUG oslo_concurrency.lockutils [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] Releasing lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.809566] env[61974]: DEBUG nova.compute.manager [req-4bc8472e-6860-443d-bc1b-def39d71a609 req-c7c964f5-de67-4f3b-a27a-7c06590590fe service nova] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Received event network-vif-deleted-5d40974a-fcbf-49fd-a14a-b03902417c3a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 765.962368] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Releasing lock "refresh_cache-9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.962621] env[61974]: DEBUG nova.compute.manager 
[None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 765.962785] env[61974]: DEBUG nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.962950] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.976755] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.019685] env[61974]: INFO nova.scheduler.client.report [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Deleted allocations for instance 9f781418-6149-4c73-aaa0-20c8cbc8c482 [ 766.238850] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269d3b67-a41a-4f23-885f-cd044b247c4a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.246738] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4154c393-468e-4875-907d-477b0b198dd1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.278207] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661029a5-03e6-42c5-8fe0-05678bb48bbd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.285884] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62442b62-90fe-4885-94e1-66aedc595507 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.299498] env[61974]: DEBUG nova.compute.provider_tree [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.481202] env[61974]: DEBUG nova.network.neutron [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 766.530548] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ea8608fe-d573-4824-828e-05a1c2c99831 tempest-ServerMetadataTestJSON-2015747953 tempest-ServerMetadataTestJSON-2015747953-project-member] Lock "9f781418-6149-4c73-aaa0-20c8cbc8c482" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.579s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.803719] env[61974]: DEBUG nova.scheduler.client.report [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 766.985239] env[61974]: INFO nova.compute.manager [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] [instance: 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff] Took 1.02 seconds to deallocate network for instance. [ 767.032900] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 767.309610] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.310840] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 767.312700] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.700s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.565671] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.817947] env[61974]: DEBUG nova.compute.utils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.822308] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 767.822482] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.873868] env[61974]: DEBUG nova.policy [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd495e2ef7963415f81907c0fadab593a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f44fc2e4f284dcdafd1eb05d97d1cd0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 768.029527] env[61974]: INFO nova.scheduler.client.report [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Deleted allocations for instance 9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff [ 768.259864] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c059a9-6231-4e38-85de-c9b7909dad15 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.271407] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb676ba-2dfb-4e85-9b37-20db0ba1e48c {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.306642] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089f060a-9eaf-4fbe-b8d1-2e2758ab95dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.316879] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc00818-2f98-417f-9927-0bc0a23b4468 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.330571] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 768.333978] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.403851] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Successfully created port: 4e7ee498-7504-4417-bdee-0930fe9c9c61 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.540617] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f462b58a-3bf0-4987-a265-d270f133a4a6 tempest-ServerDiagnosticsTest-9530179 tempest-ServerDiagnosticsTest-9530179-project-member] Lock "9c3e8a1a-0bae-41f7-b94b-1c2b35ce76ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.701s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.840453] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 769.043888] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.349583] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 769.352559] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.040s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.353663] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Traceback (most recent call last): [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.driver.spawn(context, instance, image_meta, [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] vm_ref = self.build_virtual_machine(instance, [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.353663] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] for vif in network_info: [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return self._sync_wrapper(fn, *args, **kwargs) [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 
99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.wait() [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self[:] = self._gt.wait() [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return self._exit_event.wait() [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] current.throw(*self._exc) [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.355252] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] result = function(*args, **kwargs) [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] return func(*args, **kwargs) [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise e [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] nwinfo = self.network_api.allocate_for_instance( [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] created_port_ids = self._update_ports_for_instance( [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] with excutils.save_and_reraise_exception(): [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] self.force_reraise() [ 769.355580] env[61974]: ERROR nova.compute.manager [instance: 
99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise self.value [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] updated_port = self._update_port( [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] _ensure_no_port_binding_failure(port) [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] raise exception.PortBindingFailed(port_id=port['id']) [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] nova.exception.PortBindingFailed: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. [ 769.355943] env[61974]: ERROR nova.compute.manager [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] [ 769.355943] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 769.356992] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.139s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.358520] env[61974]: INFO nova.compute.claims [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.363689] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Build of instance 99a03824-dd33-4916-84f7-4c911a98c9d1 was re-scheduled: Binding failed for port fc801ec7-9ff3-4870-ace1-8fc2b9f6e51d, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 769.364138] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 769.364367] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.365804] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.365992] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.383458] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 769.383699] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.383846] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.384023] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 
tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.385056] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.385283] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 769.385596] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 769.386059] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 769.386255] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 769.386575] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 769.386764] env[61974]: DEBUG nova.virt.hardware [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 769.387713] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dd8bee-e92b-4feb-b288-505b8597b872 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.402790] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45ca309-3fa5-4d7e-9277-d1d1fce2e180 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.434474] env[61974]: DEBUG nova.compute.manager [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Received event network-changed-4e7ee498-7504-4417-bdee-0930fe9c9c61 {{(pid=61974) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.434474] env[61974]: DEBUG nova.compute.manager [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Refreshing instance network info cache due to event network-changed-4e7ee498-7504-4417-bdee-0930fe9c9c61. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 769.434474] env[61974]: DEBUG oslo_concurrency.lockutils [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] Acquiring lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.434474] env[61974]: DEBUG oslo_concurrency.lockutils [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] Acquired lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.434474] env[61974]: DEBUG nova.network.neutron [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Refreshing network info cache for port 4e7ee498-7504-4417-bdee-0930fe9c9c61 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.577085] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.594300] env[61974]: ERROR nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. 
[ 769.594300] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.594300] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.594300] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.594300] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.594300] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.594300] env[61974]: ERROR nova.compute.manager raise self.value [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.594300] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 769.594300] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.594300] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 769.594749] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.594749] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 769.594749] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. 
[ 769.594749] env[61974]: ERROR nova.compute.manager [ 769.594749] env[61974]: Traceback (most recent call last): [ 769.594749] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 769.594749] env[61974]: listener.cb(fileno) [ 769.594749] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.594749] env[61974]: result = function(*args, **kwargs) [ 769.594749] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.594749] env[61974]: return func(*args, **kwargs) [ 769.594749] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.594749] env[61974]: raise e [ 769.594749] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.594749] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 769.594749] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.594749] env[61974]: created_port_ids = self._update_ports_for_instance( [ 769.594749] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.594749] env[61974]: with excutils.save_and_reraise_exception(): [ 769.594749] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.594749] env[61974]: self.force_reraise() [ 769.594749] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.594749] env[61974]: raise self.value [ 769.594749] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.594749] env[61974]: updated_port = self._update_port( [ 769.594749] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.594749] env[61974]: _ensure_no_port_binding_failure(port) [ 769.594749] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.594749] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 769.595456] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. [ 769.595456] env[61974]: Removing descriptor: 20 [ 769.595456] env[61974]: ERROR nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. 
[ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Traceback (most recent call last): [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] yield resources [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.driver.spawn(context, instance, image_meta, [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.595456] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] vm_ref = self.build_virtual_machine(instance, [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] for vif in network_info: [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self._sync_wrapper(fn, *args, **kwargs) [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.wait() [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self[:] = self._gt.wait() [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self._exit_event.wait() [ 769.595764] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.596113] env[61974]: ERROR 
nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] result = hub.switch() [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self.greenlet.switch() [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] result = function(*args, **kwargs) [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return func(*args, **kwargs) [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise e [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] nwinfo = self.network_api.allocate_for_instance( [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.596113] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] created_port_ids = self._update_ports_for_instance( [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] with excutils.save_and_reraise_exception(): [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.force_reraise() [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise self.value [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] updated_port = self._update_port( [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.596442] 
env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] _ensure_no_port_binding_failure(port) [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.596442] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise exception.PortBindingFailed(port_id=port['id']) [ 769.596767] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. [ 769.596767] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] [ 769.596767] env[61974]: INFO nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Terminating instance [ 769.599518] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquiring lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.893772] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.964794] env[61974]: DEBUG nova.network.neutron [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.074968] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.096295] env[61974]: DEBUG nova.network.neutron [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.578785] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-99a03824-dd33-4916-84f7-4c911a98c9d1" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.579075] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 770.579592] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 770.580102] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.600948] env[61974]: DEBUG oslo_concurrency.lockutils [req-d8836157-f163-4f2a-bc9b-eb35bd4dfb0c req-96364f8c-0cae-4498-b05b-114aeb68ecd7 service nova] Releasing lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.600948] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquired lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.600948] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.608430] env[61974]: DEBUG nova.network.neutron [None 
req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.733080] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5db0d2-89d7-4bac-bae4-92a11541bafa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.740371] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fc6763-7712-47a7-9e07-2bda04ef8f55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.770550] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f84d57-ec7b-4001-989b-4075e63df6e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.778051] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407be87b-349d-4d29-9c85-108ddf6af5d7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.791643] env[61974]: DEBUG nova.compute.provider_tree [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.111740] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.130271] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.288614] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.295097] env[61974]: DEBUG nova.scheduler.client.report [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.461545] env[61974]: DEBUG nova.compute.manager [req-9bc170ac-02bc-4651-8ee2-e69eed1bef7c req-3df1961a-9f28-47cf-acd3-e11f2ef15ccf service nova] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Received event network-vif-deleted-4e7ee498-7504-4417-bdee-0930fe9c9c61 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.615369] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 99a03824-dd33-4916-84f7-4c911a98c9d1] Took 1.04 seconds to deallocate network for instance. [ 771.792483] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Releasing lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.793778] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.794190] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.795271] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c4ee9f0-3c4a-4772-8921-e945e594db3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.799659] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.804207] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 771.811174] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.028s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.816309] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52c0eae-7781-44f1-887b-da30041e6d4b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.845272] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d334c2d8-15d8-4f70-9a85-312687d1b337 could not be found. [ 771.845272] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 771.845272] env[61974]: INFO nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Took 0.05 seconds to destroy the instance on the hypervisor. 
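Annotation: the Lock "compute_resources" acquired/released entries above (with their "waited"/"held" timings) come from oslo.concurrency's lockutils wrapper around the resource tracker's claim methods. The sketch below is a minimal, hypothetical example of that pattern, not Nova's actual ResourceTracker code; lockutils.synchronized is the real oslo API that emits those DEBUG messages.

```python
# Minimal sketch (not Nova's actual ResourceTracker code) of the lockutils
# pattern behind the "acquired ... waited N.NNNs" / "released ... held N.NNNs"
# messages above. lockutils.synchronized is the real oslo.concurrency API;
# the function and return value here are illustrative only.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid):
    # Everything in here runs under the per-process "compute_resources" lock,
    # so a concurrent abort_instance_claim() blocks and accrues "waited" time.
    return {"instance": instance_uuid, "claimed": True}

print(instance_claim("a1c488d6-4eb4-4362-84cd-68151a47d3bd"))
```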
[ 771.845272] env[61974]: DEBUG oslo.service.loopingcall [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.845272] env[61974]: DEBUG nova.compute.manager [-] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 771.845272] env[61974]: DEBUG nova.network.neutron [-] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 771.861946] env[61974]: DEBUG nova.network.neutron [-] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.319496] env[61974]: DEBUG nova.compute.utils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.326611] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 772.326896] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.363924] env[61974]: DEBUG nova.network.neutron [-] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.417008] env[61974]: DEBUG nova.policy [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78aa33b5e872455f939b2605adc12ed7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46ebc2cdf0dd4fc7954f855a5cd4c4c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 772.649781] env[61974]: INFO nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Deleted allocations for instance 99a03824-dd33-4916-84f7-4c911a98c9d1 [ 772.807932] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 
tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Successfully created port: 5957485a-ed0d-498b-94bb-d13ae3849afd {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.831633] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 772.853684] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445d86ec-cc5b-47a6-bea2-de1dda1b8e26 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.862102] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ee929d-28f5-4411-b597-bce7831ea809 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.867200] env[61974]: INFO nova.compute.manager [-] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Took 1.02 seconds to deallocate network for instance. [ 772.895913] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0919dd0-f578-4f61-b7ec-0d3379a5e6f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.898833] env[61974]: DEBUG nova.compute.claims [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 772.899021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.905206] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272c3f9f-f24f-45d7-8561-e759bb102cb2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.919744] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.155870] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.156133] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.156289] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 773.156409] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 773.162356] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "99a03824-dd33-4916-84f7-4c911a98c9d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.609s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.427112] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 773.660699] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a333f129-6a86-4715-83e2-79543620d013] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.660864] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.660991] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.661155] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.661453] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Skipping network cache update for instance because it is Building. 
{{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.661453] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Didn't find any instances for network info cache update. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 773.661582] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.661804] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.662269] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.663666] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.663666] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.663666] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.663666] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 773.663666] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.665050] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 773.844306] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 773.870381] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.870628] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.870783] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.871123] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.871123] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.871250] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.871476] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.871606] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.871769] env[61974]: DEBUG nova.virt.hardware [None 
req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.871927] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.872110] env[61974]: DEBUG nova.virt.hardware [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.872979] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546f9aef-d3f9-45b1-9fc2-3918511607e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.880963] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7df0e2d-a0e7-4f10-a1e8-54155bdd3789 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.933095] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.124s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.933763] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. 
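Annotation: the PortBindingFailed above is raised by _ensure_no_port_binding_failure in nova/network/neutron.py, as the traceback that follows shows (frames at lines 585 and 294). The sketch below reconstructs that check from the traceback; the exact 'binding:vif_type' == 'binding_failed' condition is an inference, and the exception class is a local stand-in for nova.exception.PortBindingFailed.

```python
# Reconstruction of the check that raises the PortBindingFailed seen above,
# based on the traceback frames (_update_port -> _ensure_no_port_binding_failure).
# The 'binding:vif_type' comparison is an assumption about the exact condition.
class PortBindingFailed(Exception):  # stand-in for nova.exception.PortBindingFailed
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port Neutron could not bind (e.g. no mechanism driver claimed it)
try:
    _ensure_no_port_binding_failure(
        {'id': 'b0a4e268-72a6-410a-80c5-90ef7c6ea789',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```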
[ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Traceback (most recent call last): [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.driver.spawn(context, instance, image_meta, [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] vm_ref = self.build_virtual_machine(instance, [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] vif_infos = vmwarevif.get_vif_info(self._session, [ 773.933763] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] for vif in network_info: [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self._sync_wrapper(fn, *args, **kwargs) [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.wait() [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self[:] = self._gt.wait() [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self._exit_event.wait() [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] result = hub.switch() [ 773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
773.934301] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return self.greenlet.switch() [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] result = function(*args, **kwargs) [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] return func(*args, **kwargs) [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise e [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] nwinfo = self.network_api.allocate_for_instance( [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] created_port_ids = self._update_ports_for_instance( [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] with excutils.save_and_reraise_exception(): [ 773.934831] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] self.force_reraise() [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise self.value [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] updated_port = self._update_port( [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] _ensure_no_port_binding_failure(port) [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] raise exception.PortBindingFailed(port_id=port['id']) [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] nova.exception.PortBindingFailed: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. [ 773.935266] env[61974]: ERROR nova.compute.manager [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] [ 773.935558] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 773.935735] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.992s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.937845] env[61974]: INFO nova.compute.claims [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.941437] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Build of instance 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40 was re-scheduled: Binding failed for port b0a4e268-72a6-410a-80c5-90ef7c6ea789, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 773.942091] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 773.942333] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.942481] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.942801] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.977479] env[61974]: DEBUG nova.compute.manager [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Received event network-changed-5957485a-ed0d-498b-94bb-d13ae3849afd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.977668] env[61974]: DEBUG nova.compute.manager [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Refreshing instance network info cache due to event network-changed-5957485a-ed0d-498b-94bb-d13ae3849afd. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 773.977874] env[61974]: DEBUG oslo_concurrency.lockutils [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] Acquiring lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.978043] env[61974]: DEBUG oslo_concurrency.lockutils [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] Acquired lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.978269] env[61974]: DEBUG nova.network.neutron [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Refreshing network info cache for port 5957485a-ed0d-498b-94bb-d13ae3849afd {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.128488] env[61974]: ERROR nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. [ 774.128488] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.128488] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 774.128488] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 774.128488] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.128488] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.128488] env[61974]: ERROR nova.compute.manager raise self.value [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 774.128488] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 774.128488] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.128488] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 774.128968] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.128968] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 774.128968] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. [ 774.128968] env[61974]: ERROR nova.compute.manager [ 774.128968] env[61974]: Traceback (most recent call last): [ 774.128968] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 774.128968] env[61974]: listener.cb(fileno) [ 774.128968] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.128968] env[61974]: result = function(*args, **kwargs) [ 774.128968] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 774.128968] env[61974]: return func(*args, **kwargs) [ 774.128968] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.128968] env[61974]: raise e [ 774.128968] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.128968] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 774.128968] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 774.128968] env[61974]: created_port_ids = self._update_ports_for_instance( [ 774.128968] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 774.128968] env[61974]: with excutils.save_and_reraise_exception(): [ 774.128968] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.128968] env[61974]: self.force_reraise() [ 774.128968] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.128968] env[61974]: raise self.value [ 774.128968] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 774.128968] env[61974]: updated_port = self._update_port( [ 774.128968] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.128968] env[61974]: _ensure_no_port_binding_failure(port) [ 774.128968] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.128968] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 774.129727] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. [ 774.129727] env[61974]: Removing descriptor: 21 [ 774.129727] env[61974]: ERROR nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. 
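Annotation: the repeated force_reraise()/raise self.value frames in these tracebacks are oslo.utils' save_and_reraise_exception context manager, which _update_ports_for_instance uses so that cleanup can run before the original error (here PortBindingFailed) propagates. The sketch below is a hedged illustration of that pattern with hypothetical helpers, and it assumes the rollback sits inside the context manager; only the excutils API itself is taken as given.

```python
# Sketch of the save_and_reraise_exception pattern visible in the tracebacks
# above. bind_port/rollback_port are hypothetical helpers; the context manager
# is the real oslo.utils API whose force_reraise()/raise self.value frames
# appear in the log.
from oslo_utils import excutils

def bind_port(port):
    if port.get("bad"):
        raise RuntimeError("Binding failed for port %s" % port["id"])

def rollback_port(port_id):
    print("rolling back port", port_id)

def update_ports_for_instance(ports):
    created = []
    try:
        for port in ports:
            bind_port(port)
            created.append(port["id"])
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup happens here; on exiting the block the original
            # exception is re-raised, matching the "raise self.value"
            # frames in the traceback above.
            for port_id in created:
                rollback_port(port_id)

try:
    update_ports_for_instance([{"id": "p-ok"}, {"id": "p-bad", "bad": True}])
except RuntimeError as exc:
    print("re-raised:", exc)
```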
[ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Traceback (most recent call last): [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] yield resources [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.driver.spawn(context, instance, image_meta, [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.129727] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] vm_ref = self.build_virtual_machine(instance, [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] for vif in network_info: [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self._sync_wrapper(fn, *args, **kwargs) [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.wait() [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self[:] = self._gt.wait() [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self._exit_event.wait() [ 774.130069] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 774.130430] env[61974]: ERROR 
nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] result = hub.switch() [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self.greenlet.switch() [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] result = function(*args, **kwargs) [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return func(*args, **kwargs) [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise e [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] nwinfo = self.network_api.allocate_for_instance( [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 774.130430] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] created_port_ids = self._update_ports_for_instance( [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] with excutils.save_and_reraise_exception(): [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.force_reraise() [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise self.value [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] updated_port = self._update_port( [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.130870] 
env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] _ensure_no_port_binding_failure(port) [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.130870] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise exception.PortBindingFailed(port_id=port['id']) [ 774.131193] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. [ 774.131193] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] [ 774.131193] env[61974]: INFO nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Terminating instance [ 774.132195] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquiring lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.172113] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.186897] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.460692] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.512217] env[61974]: DEBUG nova.network.neutron [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.592190] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.652291] env[61974]: DEBUG nova.network.neutron [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.675610] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquiring lock "11d4f981-b167-4c81-9cd7-7e939606d400" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.675842] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "11d4f981-b167-4c81-9cd7-7e939606d400" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.095770] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.095770] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 775.095770] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.095770] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.115013] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.154905] env[61974]: DEBUG oslo_concurrency.lockutils [req-8292df02-48ff-4694-ab01-10618ba56740 req-e5bcb0cd-0092-48a2-8d9f-a9789bad6d43 service nova] Releasing lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.155343] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquired lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.155534] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.270824] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25d33b3-6d6f-46c7-b59e-2a8587a0bc9d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.278247] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70818fe8-4854-4b7c-939a-a086b297a9b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.311421] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace4fa77-4235-4e7c-9803-d37bd981bcd3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.319099] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3c58da-17ed-400e-9486-43366b8e55a3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.332088] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 
tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.617485] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.676101] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.763015] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.834964] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.002649] env[61974]: DEBUG nova.compute.manager [req-e88a570e-7957-4202-ad30-20fc1749b1d8 req-850707ca-cc9e-4950-836d-c5acb350fb48 service nova] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Received event network-vif-deleted-5957485a-ed0d-498b-94bb-d13ae3849afd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.123812] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40] Took 1.03 seconds to deallocate network for instance. [ 776.265623] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Releasing lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.266056] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 776.266253] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 776.266544] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11b67e90-7feb-40c8-b138-36952845d977 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.275432] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b106e901-5a7b-49ac-92fc-a49f7ca51c23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.298464] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1c488d6-4eb4-4362-84cd-68151a47d3bd could not be found. [ 776.298730] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.298925] env[61974]: INFO nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Took 0.03 seconds to destroy the instance on the hypervisor. [ 776.299184] env[61974]: DEBUG oslo.service.loopingcall [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.299429] env[61974]: DEBUG nova.compute.manager [-] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.299572] env[61974]: DEBUG nova.network.neutron [-] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.315032] env[61974]: DEBUG nova.network.neutron [-] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.339630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.340159] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 776.342528] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.188s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.817454] env[61974]: DEBUG nova.network.neutron [-] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.847626] env[61974]: DEBUG nova.compute.utils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 776.851811] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 776.852104] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 776.893921] env[61974]: DEBUG nova.policy [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5f6d80a0784b1f8ff2b2fcfbb44232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40e43abf62a5464091aa725e1cff2b50', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 777.156443] env[61974]: INFO nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Deleted allocations for instance 3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40 [ 777.179336] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eaf4884-fdfb-4004-9a4c-7a42688e8587 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.193059] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3698a5-9a84-44e8-943b-a4a7addf7b57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.228326] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Successfully created port: 4b51d819-b080-4a77-aeb5-a352cd3ff6c4 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 777.230733] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af76c479-bc90-490a-bab9-52808f82b557 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.238270] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07886d7e-4412-43a0-9440-6efa5b997ea8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.252039] env[61974]: DEBUG nova.compute.provider_tree [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.320349] env[61974]: INFO nova.compute.manager [-] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Took 1.02 seconds to deallocate network for instance. 
[ 777.323610] env[61974]: DEBUG nova.compute.claims [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 777.323704] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.352779] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 777.667024] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "3a3382c3-6fb8-4d9f-b6ce-6abe27dc1c40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.074s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.755467] env[61974]: DEBUG nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.070408] env[61974]: DEBUG nova.compute.manager [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Received event network-changed-4b51d819-b080-4a77-aeb5-a352cd3ff6c4 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.070408] env[61974]: DEBUG nova.compute.manager [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Refreshing instance network info cache due to event network-changed-4b51d819-b080-4a77-aeb5-a352cd3ff6c4. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 778.070408] env[61974]: DEBUG oslo_concurrency.lockutils [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] Acquiring lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.070408] env[61974]: DEBUG oslo_concurrency.lockutils [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] Acquired lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.070408] env[61974]: DEBUG nova.network.neutron [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Refreshing network info cache for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.169154] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 778.263321] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.921s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.263949] env[61974]: ERROR nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. 
[ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] Traceback (most recent call last): [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.driver.spawn(context, instance, image_meta, [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] vm_ref = self.build_virtual_machine(instance, [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.263949] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] for vif in network_info: [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self._sync_wrapper(fn, *args, **kwargs) [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.wait() [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self[:] = self._gt.wait() [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self._exit_event.wait() [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] result = hub.switch() [ 778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
778.264286] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return self.greenlet.switch() [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] result = function(*args, **kwargs) [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] return func(*args, **kwargs) [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise e [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] nwinfo = self.network_api.allocate_for_instance( [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] created_port_ids = self._update_ports_for_instance( [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] with excutils.save_and_reraise_exception(): [ 778.264676] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] self.force_reraise() [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise self.value [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] updated_port = self._update_port( [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] _ensure_no_port_binding_failure(port) [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] raise exception.PortBindingFailed(port_id=port['id']) [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] nova.exception.PortBindingFailed: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. [ 778.265055] env[61974]: ERROR nova.compute.manager [instance: a333f129-6a86-4715-83e2-79543620d013] [ 778.265383] env[61974]: DEBUG nova.compute.utils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 778.266255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.006s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.272156] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Build of instance a333f129-6a86-4715-83e2-79543620d013 was re-scheduled: Binding failed for port 617e8233-03fc-4fc1-8e63-0ea60fa021ee, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 778.273465] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 778.273691] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquiring lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.273837] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Acquired lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.273995] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.295717] env[61974]: ERROR nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. 
[ 778.295717] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.295717] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.295717] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.295717] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.295717] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.295717] env[61974]: ERROR nova.compute.manager raise self.value [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.295717] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 778.295717] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.295717] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 778.296299] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.296299] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 778.296299] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. 
[ 778.296299] env[61974]: ERROR nova.compute.manager [ 778.296299] env[61974]: Traceback (most recent call last): [ 778.296299] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 778.296299] env[61974]: listener.cb(fileno) [ 778.296299] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.296299] env[61974]: result = function(*args, **kwargs) [ 778.296299] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.296299] env[61974]: return func(*args, **kwargs) [ 778.296299] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 778.296299] env[61974]: raise e [ 778.296299] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.296299] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 778.296299] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.296299] env[61974]: created_port_ids = self._update_ports_for_instance( [ 778.296299] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.296299] env[61974]: with excutils.save_and_reraise_exception(): [ 778.296299] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.296299] env[61974]: self.force_reraise() [ 778.296299] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.296299] env[61974]: raise self.value [ 778.296299] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.296299] env[61974]: updated_port = self._update_port( [ 778.296299] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.296299] env[61974]: _ensure_no_port_binding_failure(port) [ 778.296299] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.296299] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 778.297219] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. [ 778.297219] env[61974]: Removing descriptor: 20 [ 778.374302] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 778.401602] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.401874] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.402038] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.402223] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.402370] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.402514] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.402718] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.402874] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 778.403047] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Got 1 possible 
topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.403211] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.403407] env[61974]: DEBUG nova.virt.hardware [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.404282] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68648e3-8c4c-49f0-b21a-b8e14486a7b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.412460] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0c8a1d-a7b7-4059-91a5-8a9a5ac5986b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.426047] env[61974]: ERROR nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Traceback (most recent call last): [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] yield resources [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.driver.spawn(context, instance, image_meta, [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] vm_ref = self.build_virtual_machine(instance, [ 778.426047] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.426441] env[61974]: ERROR nova.compute.manager 
[instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] for vif in network_info: [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return self._sync_wrapper(fn, *args, **kwargs) [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.wait() [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self[:] = self._gt.wait() [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return self._exit_event.wait() [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 778.426441] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] current.throw(*self._exc) [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] result = function(*args, **kwargs) [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return func(*args, **kwargs) [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise e [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] nwinfo = self.network_api.allocate_for_instance( [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] created_port_ids = self._update_ports_for_instance( [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] with excutils.save_and_reraise_exception(): [ 778.426801] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.force_reraise() [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise self.value [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] updated_port = self._update_port( [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] _ensure_no_port_binding_failure(port) [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise exception.PortBindingFailed(port_id=port['id']) [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. [ 778.427257] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] [ 778.427257] env[61974]: INFO nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Terminating instance [ 778.428308] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.588412] env[61974]: DEBUG nova.network.neutron [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.675730] env[61974]: DEBUG nova.network.neutron [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.691237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.792468] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.893365] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.072450] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfb0bd2-345b-43a4-8982-83e16782d920 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.079826] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8541c2-5bb4-434a-b128-9ae0e524c45b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.110351] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355255ea-0590-4962-9886-e46ceed27f52 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.117289] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f955f2a-4b40-4e5d-ba41-7133866c48a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.130509] env[61974]: DEBUG nova.compute.provider_tree [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.179692] env[61974]: DEBUG oslo_concurrency.lockutils [req-92933f33-0b47-4071-9caf-2dbb85ea057e req-d4bc793c-06c7-4fdc-96f2-ff8eb6dc7ecb service nova] Releasing lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.179820] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 
tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.180009] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.397787] env[61974]: DEBUG oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Releasing lock "refresh_cache-a333f129-6a86-4715-83e2-79543620d013" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.398047] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 779.398357] env[61974]: DEBUG nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 779.398425] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.414385] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.632951] env[61974]: DEBUG nova.scheduler.client.report [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.701624] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.770289] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.917584] env[61974]: DEBUG nova.network.neutron [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.092360] env[61974]: DEBUG nova.compute.manager [req-2b941ea0-b416-4885-ac5e-edde36ce2c99 req-db78f5a0-6e23-43e0-8053-1f19d324d9ad service nova] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Received event network-vif-deleted-4b51d819-b080-4a77-aeb5-a352cd3ff6c4 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 780.138361] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.872s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.138989] env[61974]: ERROR nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. 
[ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Traceback (most recent call last): [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.driver.spawn(context, instance, image_meta, [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] vm_ref = self.build_virtual_machine(instance, [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.138989] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] for vif in network_info: [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self._sync_wrapper(fn, *args, **kwargs) [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.wait() [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self[:] = self._gt.wait() [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self._exit_event.wait() [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] result = hub.switch() [ 780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
780.139370] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return self.greenlet.switch() [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] result = function(*args, **kwargs) [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] return func(*args, **kwargs) [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise e [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] nwinfo = self.network_api.allocate_for_instance( [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] created_port_ids = self._update_ports_for_instance( [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] with excutils.save_and_reraise_exception(): [ 780.139843] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] self.force_reraise() [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise self.value [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] updated_port = self._update_port( [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] _ensure_no_port_binding_failure(port) [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] raise exception.PortBindingFailed(port_id=port['id']) [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] nova.exception.PortBindingFailed: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. [ 780.140275] env[61974]: ERROR nova.compute.manager [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] [ 780.140757] env[61974]: DEBUG nova.compute.utils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 780.141010] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.771s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.143924] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Build of instance 7a465c7e-874d-4cd1-9c23-0ae249997114 was re-scheduled: Binding failed for port 105582ad-592c-4a57-9b75-d5a823ec809a, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 780.144356] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 780.144580] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquiring lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.144726] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Acquired lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.144884] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.273135] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.273536] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 780.273728] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.274046] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87d30ef3-8687-4de1-9a6e-f2efaf28aa34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.283337] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9d6386-4683-4813-90f3-df0d5414762e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.306831] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb could not be found. 
[ 780.307080] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 780.307295] env[61974]: INFO nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 780.307556] env[61974]: DEBUG oslo.service.loopingcall [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.308131] env[61974]: DEBUG nova.compute.manager [-] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 780.308254] env[61974]: DEBUG nova.network.neutron [-] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.323443] env[61974]: DEBUG nova.network.neutron [-] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.423485] env[61974]: INFO nova.compute.manager [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] [instance: a333f129-6a86-4715-83e2-79543620d013] Took 1.02 seconds to deallocate network for instance. [ 780.669931] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.752314] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.826340] env[61974]: DEBUG nova.network.neutron [-] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.971549] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abcd126-432a-42b8-9749-2a94fc254df6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.980074] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204bfad1-5a00-4400-8013-c393d8e86699 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.010029] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038db04c-35af-4dec-b6fb-8b069580b0cd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.017099] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03f513d-cc96-47f4-bca0-9a826b370086 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.031265] env[61974]: DEBUG nova.compute.provider_tree [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.256064] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Releasing lock "refresh_cache-7a465c7e-874d-4cd1-9c23-0ae249997114" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.256064] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 781.256064] env[61974]: DEBUG nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.256064] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.276064] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.331879] env[61974]: INFO nova.compute.manager [-] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Took 1.02 seconds to deallocate network for instance. [ 781.334093] env[61974]: DEBUG nova.compute.claims [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 781.334271] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.456823] env[61974]: INFO nova.scheduler.client.report [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Deleted allocations for instance a333f129-6a86-4715-83e2-79543620d013 [ 781.534666] env[61974]: DEBUG nova.scheduler.client.report [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.779945] env[61974]: DEBUG nova.network.neutron [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.964681] env[61974]: DEBUG 
oslo_concurrency.lockutils [None req-27d14c0b-f46f-46c5-9d4c-ce093e186ee6 tempest-ListServersNegativeTestJSON-1805277179 tempest-ListServersNegativeTestJSON-1805277179-project-member] Lock "a333f129-6a86-4715-83e2-79543620d013" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.289s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.039576] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.898s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.040208] env[61974]: ERROR nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Traceback (most recent call last): [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.driver.spawn(context, instance, image_meta, [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] vm_ref = self.build_virtual_machine(instance, [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.040208] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] for vif in network_info: [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self._sync_wrapper(fn, *args, **kwargs) [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 
2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.wait() [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self[:] = self._gt.wait() [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self._exit_event.wait() [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] result = hub.switch() [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.040614] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return self.greenlet.switch() [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] result = function(*args, **kwargs) [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] return func(*args, **kwargs) [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise e [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] nwinfo = self.network_api.allocate_for_instance( [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] created_port_ids = self._update_ports_for_instance( [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] with excutils.save_and_reraise_exception(): [ 782.040966] env[61974]: ERROR nova.compute.manager [instance: 
2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] self.force_reraise() [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise self.value [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] updated_port = self._update_port( [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] _ensure_no_port_binding_failure(port) [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] raise exception.PortBindingFailed(port_id=port['id']) [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] nova.exception.PortBindingFailed: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. [ 782.041365] env[61974]: ERROR nova.compute.manager [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] [ 782.041659] env[61974]: DEBUG nova.compute.utils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. 
{{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.042204] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.477s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.043740] env[61974]: INFO nova.compute.claims [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.046639] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Build of instance 2e217cbc-4962-44c7-b054-b3ae135ef8bb was re-scheduled: Binding failed for port 5d40974a-fcbf-49fd-a14a-b03902417c3a, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 782.047099] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 782.047410] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquiring lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.047562] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Acquired lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.047723] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.282990] env[61974]: INFO nova.compute.manager [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] [instance: 7a465c7e-874d-4cd1-9c23-0ae249997114] Took 1.03 seconds to deallocate network for instance. [ 782.467175] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 782.592358] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.707293] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.988915] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.209481] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Releasing lock "refresh_cache-2e217cbc-4962-44c7-b054-b3ae135ef8bb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.209798] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 783.209981] env[61974]: DEBUG nova.compute.manager [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.210176] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.233448] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.314362] env[61974]: INFO nova.scheduler.client.report [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Deleted allocations for instance 7a465c7e-874d-4cd1-9c23-0ae249997114 [ 783.379442] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b9793b-a673-4f46-936e-3cf766be839b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.388069] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6067faa0-273a-410a-99b4-150d92bd46e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.424494] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd179204-2e85-49f2-8e77-88feaabda6d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.432101] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6e864d-1651-449a-8404-d9f847c11f88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.445970] env[61974]: DEBUG nova.compute.provider_tree [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.736842] env[61974]: DEBUG nova.network.neutron [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.824693] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f42b6a72-2a60-4882-ac82-9306fbbad2dd tempest-ServersTestFqdnHostnames-666422140 tempest-ServersTestFqdnHostnames-666422140-project-member] Lock "7a465c7e-874d-4cd1-9c23-0ae249997114" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.247s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.949349] env[61974]: DEBUG nova.scheduler.client.report [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.241630] env[61974]: INFO nova.compute.manager [None 
req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] [instance: 2e217cbc-4962-44c7-b054-b3ae135ef8bb] Took 1.03 seconds to deallocate network for instance. [ 784.327383] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 784.462046] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.462323] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.464852] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.888s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.466957] env[61974]: INFO nova.compute.claims [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.852475] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.971394] env[61974]: DEBUG nova.compute.utils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.977716] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 784.977889] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.042609] env[61974]: DEBUG nova.policy [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6173db476e814cbaa6b3278cfa527bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dae05232e0041dba49b0432d64d82d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.281527] env[61974]: INFO nova.scheduler.client.report [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Deleted allocations for instance 2e217cbc-4962-44c7-b054-b3ae135ef8bb [ 785.483447] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.568828] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Successfully created port: 9c2c80e9-3703-433d-a155-7504f2ebaba6 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.793191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b3e7ee05-454b-41f7-b824-8cb90375dc64 tempest-ServersV294TestFqdnHostnames-1225047466 tempest-ServersV294TestFqdnHostnames-1225047466-project-member] Lock "2e217cbc-4962-44c7-b054-b3ae135ef8bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.159s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.876729] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ea81aa-2b8a-4e4f-be4b-8a752f7a3520 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.885026] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953b17db-5064-4a2c-b53f-2528de9ff463 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.915132] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdd2564-8512-4343-8d67-1016b49d8338 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.923111] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093f3653-60eb-4f24-a0c4-3f0d874f2bed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.938183] env[61974]: DEBUG nova.compute.provider_tree [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.299908] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 786.445115] env[61974]: DEBUG nova.scheduler.client.report [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.498030] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.523043] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 786.523725] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.524052] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.524390] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.524713] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.525106] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 786.528019] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 786.528019] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 786.528019] env[61974]: DEBUG 
nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 786.528019] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 786.528019] env[61974]: DEBUG nova.virt.hardware [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 786.528290] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cde62a-4a84-49ca-a93d-ed251e401c09 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.536426] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263e3e1c-d4a6-4e82-8489-8f94f6ca293a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.835397] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.954677] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.954677] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 786.957495] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.058s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.152314] env[61974]: DEBUG nova.compute.manager [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Received event network-changed-9c2c80e9-3703-433d-a155-7504f2ebaba6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.152314] env[61974]: DEBUG nova.compute.manager [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Refreshing instance network info cache due to event network-changed-9c2c80e9-3703-433d-a155-7504f2ebaba6. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 787.152314] env[61974]: DEBUG oslo_concurrency.lockutils [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] Acquiring lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.152314] env[61974]: DEBUG oslo_concurrency.lockutils [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] Acquired lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.152314] env[61974]: DEBUG nova.network.neutron [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Refreshing network info cache for port 9c2c80e9-3703-433d-a155-7504f2ebaba6 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.466241] env[61974]: DEBUG nova.compute.utils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.475665] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 787.701262] env[61974]: DEBUG nova.network.neutron [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.705204] env[61974]: ERROR nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. [ 787.705204] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.705204] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.705204] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.705204] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.705204] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.705204] env[61974]: ERROR nova.compute.manager raise self.value [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.705204] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 787.705204] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.705204] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 787.705627] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.705627] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 787.705627] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. 
[ 787.705627] env[61974]: ERROR nova.compute.manager [ 787.705627] env[61974]: Traceback (most recent call last): [ 787.705627] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 787.705627] env[61974]: listener.cb(fileno) [ 787.705627] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.705627] env[61974]: result = function(*args, **kwargs) [ 787.705627] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 787.705627] env[61974]: return func(*args, **kwargs) [ 787.705627] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.705627] env[61974]: raise e [ 787.705627] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.705627] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 787.705627] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.705627] env[61974]: created_port_ids = self._update_ports_for_instance( [ 787.705627] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.705627] env[61974]: with excutils.save_and_reraise_exception(): [ 787.705627] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.705627] env[61974]: self.force_reraise() [ 787.705627] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.705627] env[61974]: raise self.value [ 787.705627] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.705627] env[61974]: updated_port = self._update_port( [ 787.705627] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.705627] env[61974]: _ensure_no_port_binding_failure(port) [ 787.705627] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.705627] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 787.706420] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. [ 787.706420] env[61974]: Removing descriptor: 20 [ 787.706420] env[61974]: ERROR nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. 
[ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Traceback (most recent call last): [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] yield resources [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.driver.spawn(context, instance, image_meta, [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 787.706420] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] vm_ref = self.build_virtual_machine(instance, [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] vif_infos = vmwarevif.get_vif_info(self._session, [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] for vif in network_info: [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self._sync_wrapper(fn, *args, **kwargs) [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.wait() [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self[:] = self._gt.wait() [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self._exit_event.wait() [ 787.706741] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 787.707301] env[61974]: ERROR 
nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] result = hub.switch() [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self.greenlet.switch() [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] result = function(*args, **kwargs) [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return func(*args, **kwargs) [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise e [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] nwinfo = self.network_api.allocate_for_instance( [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.707301] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] created_port_ids = self._update_ports_for_instance( [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] with excutils.save_and_reraise_exception(): [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.force_reraise() [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise self.value [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] updated_port = self._update_port( [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.707726] 
env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] _ensure_no_port_binding_failure(port) [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.707726] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise exception.PortBindingFailed(port_id=port['id']) [ 787.708232] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. [ 787.708232] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] [ 787.708232] env[61974]: INFO nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Terminating instance [ 787.709805] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.836616] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b9e66f-d749-47c6-9476-27371f8ea809 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.845487] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56620c8a-8344-4c00-b4f4-05b724ca1b15 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.881722] env[61974]: DEBUG nova.network.neutron [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.885251] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edb325d-00a9-4ed4-a79d-896381da9e7c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.891510] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87597c25-958f-403a-bc8d-4c7aa9da008a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.905359] env[61974]: DEBUG nova.compute.provider_tree [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.978020] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Start 
building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 788.388062] env[61974]: DEBUG oslo_concurrency.lockutils [req-65ee8890-cc89-462f-b3d5-b0d4c2be2d23 req-961a5627-b87e-4cd4-99f1-3a3a0f343492 service nova] Releasing lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.388507] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.388691] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.411060] env[61974]: DEBUG nova.scheduler.client.report [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.909544] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.919350] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.919999] env[61974]: ERROR nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. 
[ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Traceback (most recent call last): [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.driver.spawn(context, instance, image_meta, [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self._vmops.spawn(context, instance, image_meta, injected_files, [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] vm_ref = self.build_virtual_machine(instance, [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] vif_infos = vmwarevif.get_vif_info(self._session, [ 788.919999] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] for vif in network_info: [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self._sync_wrapper(fn, *args, **kwargs) [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.wait() [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self[:] = self._gt.wait() [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self._exit_event.wait() [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] result = hub.switch() [ 788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
788.920371] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return self.greenlet.switch() [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] result = function(*args, **kwargs) [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] return func(*args, **kwargs) [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise e [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] nwinfo = self.network_api.allocate_for_instance( [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] created_port_ids = self._update_ports_for_instance( [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] with excutils.save_and_reraise_exception(): [ 788.920749] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] self.force_reraise() [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise self.value [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] updated_port = self._update_port( [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] _ensure_no_port_binding_failure(port) [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] raise exception.PortBindingFailed(port_id=port['id']) [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] nova.exception.PortBindingFailed: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. [ 788.921128] env[61974]: ERROR nova.compute.manager [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] [ 788.921452] env[61974]: DEBUG nova.compute.utils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 788.921881] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.750s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.922070] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.922238] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 788.922519] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.736s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.924314] env[61974]: INFO nova.compute.claims [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.928512] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Build of instance d334c2d8-15d8-4f70-9a85-312687d1b337 was re-scheduled: Binding failed for port 4e7ee498-7504-4417-bdee-0930fe9c9c61, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 788.928835] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 788.929087] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquiring lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.930031] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Acquired lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.930031] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.931246] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ad2b03-2fc8-460b-ba55-4ff0243e1fff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.944752] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353c1889-346c-4116-8df1-7e1f072cc05c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.961970] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2561ad8f-8cd9-4bd2-b2a5-09d2d6c836d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.969186] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff1e5d0-118f-4d87-b6cc-d857fa41533f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.004415] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 789.006492] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181402MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 789.006806] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.022745] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.033774] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.034048] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.034208] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.034398] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.034614] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.034677] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 
tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.034884] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.035057] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.035226] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.035519] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.035632] env[61974]: DEBUG nova.virt.hardware [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.037196] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35da726f-caa7-498d-afa5-6a0d110e3ec1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.046630] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eca3923-3d26-44a0-ab15-ae6a7bfe8508 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.060216] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.068567] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Creating folder: Project (c438a6ff41a34d33af3a37b1cbea15fa). Parent ref: group-v292912. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.068872] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-070a3a5e-89c2-486f-b8e9-4fae3d271c68 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.080343] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Created folder: Project (c438a6ff41a34d33af3a37b1cbea15fa) in parent group-v292912. [ 789.080602] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Creating folder: Instances. Parent ref: group-v292929. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.080780] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d0aa941-6b61-4f9a-b1e8-a7d02f3124c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.090704] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Created folder: Instances in parent group-v292929. [ 789.090938] env[61974]: DEBUG oslo.service.loopingcall [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.091136] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.091333] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8bde8b4-1db6-41ed-a1d6-11612b5a543f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.107331] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.107331] env[61974]: value = "task-1378954" [ 789.107331] env[61974]: _type = "Task" [ 789.107331] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.115625] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378954, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.209702] env[61974]: DEBUG nova.compute.manager [req-bca66c5e-a247-45ea-b1c5-8c9f92728c8a req-c2673aa7-686e-403c-8867-034f69a9c3d3 service nova] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Received event network-vif-deleted-9c2c80e9-3703-433d-a155-7504f2ebaba6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.370525] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquiring lock "0a62f878-43c1-4aaf-9054-798572b4faa7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.370777] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "0a62f878-43c1-4aaf-9054-798572b4faa7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.463407] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.526603] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.529541] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 789.529541] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.529541] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5290ab18-62f6-4a92-91bc-2f47f3d38e14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.542258] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2d8737-2ce7-41cb-99da-2bf71418f6af {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.565251] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.573981] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30455d07-4826-4561-a04f-1b4a2041402c could not be found. [ 789.574289] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.574407] env[61974]: INFO nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 789.574650] env[61974]: DEBUG oslo.service.loopingcall [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.575193] env[61974]: DEBUG nova.compute.manager [-] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.575282] env[61974]: DEBUG nova.network.neutron [-] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.592281] env[61974]: DEBUG nova.network.neutron [-] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.619281] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378954, 'name': CreateVM_Task, 'duration_secs': 0.270812} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.619422] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.619865] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.620030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.620376] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.620625] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f6569f1-9890-4ba0-9a1d-991741bd6518 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.626292] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 789.626292] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]521f885d-051e-507c-3f90-e0a575b4b411" [ 789.626292] env[61974]: _type = "Task" [ 789.626292] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.634829] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]521f885d-051e-507c-3f90-e0a575b4b411, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.077368] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Releasing lock "refresh_cache-d334c2d8-15d8-4f70-9a85-312687d1b337" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.077654] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 790.077792] env[61974]: DEBUG nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.077961] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.094180] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.095470] env[61974]: DEBUG nova.network.neutron [-] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.136977] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]521f885d-051e-507c-3f90-e0a575b4b411, 'name': SearchDatastore_Task, 'duration_secs': 0.020247} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.139512] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.139753] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.139979] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.140139] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.140318] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.140734] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b5e7b64-3c33-4bc5-ad06-13639b16ddca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.148192] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.148337] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.148978] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a95a15eb-c192-4d33-8159-fbb4954c4037 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.156114] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 790.156114] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520b4c9f-2aa1-09b3-c238-f510f362056d" [ 790.156114] env[61974]: _type = "Task" [ 790.156114] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.163925] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b4c9f-2aa1-09b3-c238-f510f362056d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.245849] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e32393-b455-441e-911b-4f19a578c45b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.253503] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe7fe30-bb62-455b-a43c-a01a9543d42d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.287253] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72337ac0-b4fd-41ac-bfbe-79ac54417ba7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.294224] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1737645-3f36-45ed-ab55-1ea8f46603f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.308702] env[61974]: DEBUG nova.compute.provider_tree [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.598865] env[61974]: DEBUG nova.network.neutron [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.600586] env[61974]: INFO nova.compute.manager [-] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Took 1.03 seconds to deallocate network for instance. 
[ 790.605796] env[61974]: DEBUG nova.compute.claims [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 790.605796] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.667173] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b4c9f-2aa1-09b3-c238-f510f362056d, 'name': SearchDatastore_Task, 'duration_secs': 0.008427} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.667841] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5adb3d48-920c-405e-a700-f7d864553255 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.672863] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 790.672863] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52584ceb-df24-d75f-f45a-5a987ed8e1be" [ 790.672863] env[61974]: _type = "Task" [ 790.672863] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.680530] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52584ceb-df24-d75f-f45a-5a987ed8e1be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.813531] env[61974]: DEBUG nova.scheduler.client.report [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.109083] env[61974]: INFO nova.compute.manager [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] [instance: d334c2d8-15d8-4f70-9a85-312687d1b337] Took 1.03 seconds to deallocate network for instance. 
[ 791.183576] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52584ceb-df24-d75f-f45a-5a987ed8e1be, 'name': SearchDatastore_Task, 'duration_secs': 0.008608} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.183840] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.184103] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 791.184353] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f3d97ff-e360-4b31-b0a1-412f1f466345 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.191216] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 791.191216] env[61974]: value = "task-1378955" [ 791.191216] env[61974]: _type = "Task" [ 791.191216] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.198767] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.321241] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.322494] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 791.325099] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.702354] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378955, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472258} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.702800] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.702976] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.703269] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7930d614-d931-41af-a50d-a8785063db89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.709731] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 791.709731] env[61974]: value = "task-1378956" [ 791.709731] env[61974]: _type = "Task" [ 791.709731] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.718270] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378956, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.829897] env[61974]: DEBUG nova.compute.utils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.834805] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Not allocating networking since 'none' was specified. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 792.148439] env[61974]: INFO nova.scheduler.client.report [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Deleted allocations for instance d334c2d8-15d8-4f70-9a85-312687d1b337 [ 792.158327] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e230c247-a4ce-42b9-8269-2ad3fd75c61c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.168674] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce224c3-182a-4b31-9044-6a43f1a1ad97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.200838] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775912e6-d722-4e8f-b3fe-774cc6d78b98 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.208624] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af236cb1-69f0-4d5d-a796-7b523c1f36f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.221012] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378956, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064398} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.229000] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.229514] env[61974]: DEBUG nova.compute.provider_tree [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.231467] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf89b1e-cd24-4b5a-8337-4ab5be0ae3f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.255320] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.256251] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cff29ed2-898e-4d69-bfba-27744ddad386 
{{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.275818] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 792.275818] env[61974]: value = "task-1378957" [ 792.275818] env[61974]: _type = "Task" [ 792.275818] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.283837] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378957, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.335616] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 792.662870] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7703be4a-966d-4dd3-b571-1f927d2e2a4e tempest-ImagesOneServerNegativeTestJSON-497748301 tempest-ImagesOneServerNegativeTestJSON-497748301-project-member] Lock "d334c2d8-15d8-4f70-9a85-312687d1b337" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.662s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.735224] env[61974]: DEBUG nova.scheduler.client.report [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 792.786249] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378957, 'name': ReconfigVM_Task, 'duration_secs': 0.278701} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.786570] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Reconfigured VM instance instance-0000002a to attach disk [datastore2] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.787214] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d78f75f3-c0db-455b-a845-598ed0436621 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.793948] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 792.793948] env[61974]: value = "task-1378958" [ 792.793948] env[61974]: _type = "Task" [ 792.793948] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.804730] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378958, 'name': Rename_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.166247] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.241191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.242225] env[61974]: ERROR nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. 
[ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Traceback (most recent call last): [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.driver.spawn(context, instance, image_meta, [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] vm_ref = self.build_virtual_machine(instance, [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.242225] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] for vif in network_info: [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self._sync_wrapper(fn, *args, **kwargs) [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.wait() [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self[:] = self._gt.wait() [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self._exit_event.wait() [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] result = hub.switch() [ 793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
793.243510] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return self.greenlet.switch() [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] result = function(*args, **kwargs) [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] return func(*args, **kwargs) [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise e [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] nwinfo = self.network_api.allocate_for_instance( [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] created_port_ids = self._update_ports_for_instance( [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] with excutils.save_and_reraise_exception(): [ 793.244051] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] self.force_reraise() [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise self.value [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] updated_port = self._update_port( [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] _ensure_no_port_binding_failure(port) [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] raise exception.PortBindingFailed(port_id=port['id']) [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] nova.exception.PortBindingFailed: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. [ 793.244429] env[61974]: ERROR nova.compute.manager [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] [ 793.244909] env[61974]: DEBUG nova.compute.utils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 793.244909] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.554s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.246299] env[61974]: INFO nova.compute.claims [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.249540] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Build of instance a1c488d6-4eb4-4362-84cd-68151a47d3bd was re-scheduled: Binding failed for port 5957485a-ed0d-498b-94bb-d13ae3849afd, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 793.250009] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 793.250246] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquiring lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.250394] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Acquired lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.250555] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.305496] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378958, 'name': Rename_Task, 'duration_secs': 0.229336} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.307345] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.307611] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e69fc67-9b2a-4482-a98a-eed69d7fda6d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.314518] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 793.314518] env[61974]: value = "task-1378959" [ 793.314518] env[61974]: _type = "Task" [ 793.314518] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.322836] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378959, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.346202] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 793.381484] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 793.382533] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.382533] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 793.382533] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.382533] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 793.382533] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 793.382916] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 793.382916] 
env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 793.382916] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 793.383117] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 793.383350] env[61974]: DEBUG nova.virt.hardware [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 793.384181] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71828cb1-5352-4052-9633-ae6d9f0c2386 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.393087] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d93c144-2a16-4109-8f04-ff04a79e5ef1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.410639] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.416657] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Creating folder: Project (eeed5f7b7537483ab431161c90cafd76). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.416753] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54e12748-e64f-4909-88c1-324d85c2cbe7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.427017] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Created folder: Project (eeed5f7b7537483ab431161c90cafd76) in parent group-v292912. [ 793.427241] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Creating folder: Instances. Parent ref: group-v292932. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.427534] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1094b648-faee-476a-94d1-617f2d51dffd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.433952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "68794d97-95f7-4612-9f9f-e370afb3d852" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.433952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "68794d97-95f7-4612-9f9f-e370afb3d852" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.441728] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Created folder: Instances in parent group-v292932. [ 793.441728] env[61974]: DEBUG oslo.service.loopingcall [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.441728] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.441728] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6048c9c6-4c54-4084-b530-35e01c48085d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.461731] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.461731] env[61974]: value = "task-1378962" [ 793.461731] env[61974]: _type = "Task" [ 793.461731] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.469839] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378962, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.694533] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.776074] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.824739] env[61974]: DEBUG oslo_vmware.api [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378959, 'name': PowerOnVM_Task, 'duration_secs': 0.432188} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.825708] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.825708] env[61974]: INFO nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Took 4.82 seconds to spawn the instance on the hypervisor. 
[ 793.825708] env[61974]: DEBUG nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 793.826257] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732bbc8b-39a8-4765-8c4d-e74434699a5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.886313] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "a9edbd98-3e67-476b-934d-15d893a62d02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.886616] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.900529] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.973514] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378962, 'name': CreateVM_Task, 'duration_secs': 0.256826} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.973699] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.974134] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.974309] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.974605] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 793.974853] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-416588d6-d9ff-4dae-b0dd-85eab25e2030 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.979567] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 793.979567] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5293b474-425d-9c1c-454a-e19b2651ee91" [ 793.979567] env[61974]: _type = "Task" [ 793.979567] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.987269] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5293b474-425d-9c1c-454a-e19b2651ee91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.349354] env[61974]: INFO nova.compute.manager [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Took 24.80 seconds to build instance. 
[ 794.403655] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Releasing lock "refresh_cache-a1c488d6-4eb4-4362-84cd-68151a47d3bd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.404647] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 794.404876] env[61974]: DEBUG nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 794.405314] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.436869] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.497861] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5293b474-425d-9c1c-454a-e19b2651ee91, 'name': SearchDatastore_Task, 'duration_secs': 0.008763} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.498264] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.498500] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 794.498773] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.499047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.499313] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.499620] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db7f690c-59ad-4a47-b248-f0b7a22c586e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.510598] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.511102] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.511799] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0fd16b-b802-43ba-bd35-2ac83738d382 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.519576] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 794.519576] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b54d4e-4ff2-f7a6-a17b-2fbc23efc04b" [ 794.519576] env[61974]: _type = "Task" [ 794.519576] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.533246] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b54d4e-4ff2-f7a6-a17b-2fbc23efc04b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.707400] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c3e693-2997-4e37-9637-92ee6a0b267c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.719976] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551f28fc-74ad-4644-ae8d-685bd7b01bef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.758264] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c08bfc-fdea-4382-9b2c-e9686bb9ba72 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.766910] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7074d65a-de72-4699-9da9-e9f6eb74ecff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.780532] env[61974]: DEBUG nova.compute.provider_tree [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.855243] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30ef28f2-f8a6-42c0-a1e3-3b244d7f48f4 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.819s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.941261] env[61974]: DEBUG nova.network.neutron [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Updating instance_info_cache with 
network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.034364] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b54d4e-4ff2-f7a6-a17b-2fbc23efc04b, 'name': SearchDatastore_Task, 'duration_secs': 0.014513} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.035281] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb7569ca-3c56-44af-95de-3031f51cb859 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.043741] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 795.043741] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f809cf-f874-c1ac-fc90-f77a3d9eafa5" [ 795.043741] env[61974]: _type = "Task" [ 795.043741] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.056047] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f809cf-f874-c1ac-fc90-f77a3d9eafa5, 'name': SearchDatastore_Task, 'duration_secs': 0.008903} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.056047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.056047] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 59b1ad04-c949-4b07-af77-f84f842dd9ee/59b1ad04-c949-4b07-af77-f84f842dd9ee.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.056047] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2230bf11-fdd1-4c7a-af8d-257543bf8d78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.063129] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 795.063129] env[61974]: value = "task-1378963" [ 795.063129] env[61974]: _type = "Task" [ 795.063129] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.072570] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.080577] env[61974]: INFO nova.compute.manager [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Rebuilding instance [ 795.131650] env[61974]: DEBUG nova.compute.manager [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 795.132608] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f87b53d-f2fa-4344-8839-69f418b245c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.285145] env[61974]: DEBUG nova.scheduler.client.report [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 795.358088] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 795.443529] env[61974]: INFO nova.compute.manager [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] [instance: a1c488d6-4eb4-4362-84cd-68151a47d3bd] Took 1.04 seconds to deallocate network for instance. [ 795.572977] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448076} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.573306] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 59b1ad04-c949-4b07-af77-f84f842dd9ee/59b1ad04-c949-4b07-af77-f84f842dd9ee.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 795.573570] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 795.573866] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e523172-c38c-46a9-9356-898a2113f3a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.580922] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 795.580922] env[61974]: value = "task-1378964" [ 795.580922] env[61974]: _type = "Task" [ 795.580922] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.588438] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.643066] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.643478] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2c6a6e3-8d2c-4e34-9c27-6e9b1e7d3325 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.650196] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 795.650196] env[61974]: value = "task-1378965" [ 795.650196] env[61974]: _type = "Task" [ 795.650196] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.658542] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.790845] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.546s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.791486] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 795.794168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.460s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.878425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.090541] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069573} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.090817] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.091676] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b4c253-b941-4a03-b687-63cec10fec06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.112086] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 59b1ad04-c949-4b07-af77-f84f842dd9ee/59b1ad04-c949-4b07-af77-f84f842dd9ee.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.113043] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f47f689-eddc-4c80-9f12-a6cbcd00d26b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.132498] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 796.132498] env[61974]: value = "task-1378966" [ 796.132498] env[61974]: _type = "Task" [ 796.132498] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.140296] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.158737] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378965, 'name': PowerOffVM_Task, 'duration_secs': 0.19008} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.159089] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.159370] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.160110] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4765e18-27aa-40b6-a929-6905591d19a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.166612] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.166847] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93c6c992-abd0-4c66-9474-9a4f105e0cb0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.189970] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.190239] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.190495] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Deleting the datastore file [datastore2] f6b76518-d691-4e4f-861a-624a1684e564 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.190784] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f082392-ced9-4a2c-a054-e17478ba0636 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.197278] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 796.197278] env[61974]: value = "task-1378968" [ 796.197278] env[61974]: _type = "Task" [ 796.197278] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.205188] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378968, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.306671] env[61974]: DEBUG nova.compute.utils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.307416] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 796.307416] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.361358] env[61974]: DEBUG nova.policy [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71ef8b1a94a84981a183816e2b3df9e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4978da9dfa484f0da89f231a02c45024', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 796.471255] env[61974]: INFO nova.scheduler.client.report [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Deleted allocations for instance a1c488d6-4eb4-4362-84cd-68151a47d3bd [ 796.643540] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378966, 'name': ReconfigVM_Task, 'duration_secs': 0.322334} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.644341] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 59b1ad04-c949-4b07-af77-f84f842dd9ee/59b1ad04-c949-4b07-af77-f84f842dd9ee.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 796.646062] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cea1540-e854-4f0a-9279-591671ab41ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.648354] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf110d52-d37c-4de6-ae8b-a2b853b5b67e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.658120] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b705c202-abb2-46cd-a618-864dc8f62666 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.661136] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 796.661136] env[61974]: value = "task-1378969" [ 796.661136] env[61974]: _type = "Task" [ 796.661136] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.691551] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1234e4cc-b37b-4a4b-afb7-c974b860709e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.698022] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378969, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.704479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d4ee54-6d23-40ae-8028-e967eb3854b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.712273] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135335} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.712273] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.712273] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 796.712273] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.722564] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.724612] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Successfully created port: 9d2964d9-0189-4d44-86a8-494c91ea9068 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.811017] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 796.982268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-88b5d2e2-697e-4b53-b727-de2f811b40a9 tempest-ServerActionsTestJSON-1955538191 tempest-ServerActionsTestJSON-1955538191-project-member] Lock "a1c488d6-4eb4-4362-84cd-68151a47d3bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.838s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.171214] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378969, 'name': Rename_Task, 'duration_secs': 0.132651} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.171463] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.171741] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74d88884-b5bb-4ed5-b53d-249196dd542c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.178154] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 797.178154] env[61974]: value = "task-1378970" [ 797.178154] env[61974]: _type = "Task" [ 797.178154] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.185276] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.249393] env[61974]: ERROR nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [req-dadec1c7-392a-4661-aefc-e7986b8c85ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 81f72dd1-35ef-4b87-b120-a6ea5ab8608a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dadec1c7-392a-4661-aefc-e7986b8c85ad"}]}: nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. 
[ 797.273616] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Refreshing inventories for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 797.292248] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating ProviderTree inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 797.292248] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.303029] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Refreshing aggregate associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, aggregates: None {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 797.325885] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Refreshing trait associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 797.485971] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 797.581725] env[61974]: DEBUG nova.compute.manager [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Received event network-changed-9d2964d9-0189-4d44-86a8-494c91ea9068 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 797.581725] env[61974]: DEBUG nova.compute.manager [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Refreshing instance network info cache due to event network-changed-9d2964d9-0189-4d44-86a8-494c91ea9068. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 797.582081] env[61974]: DEBUG oslo_concurrency.lockutils [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] Acquiring lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.582081] env[61974]: DEBUG oslo_concurrency.lockutils [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] Acquired lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.582231] env[61974]: DEBUG nova.network.neutron [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Refreshing network info cache for port 9d2964d9-0189-4d44-86a8-494c91ea9068 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.677104] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e86b342-b4ec-411e-8c6b-58c0ffdedf5e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.685134] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d43659-d682-47e1-9b4e-fd033abb0f83 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.691192] env[61974]: DEBUG oslo_vmware.api [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378970, 'name': PowerOnVM_Task, 'duration_secs': 0.398251} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.691793] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.692123] env[61974]: INFO nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Took 4.35 seconds to spawn the instance on the hypervisor. 
[ 797.692391] env[61974]: DEBUG nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 797.693113] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba28c7f-6d18-4e70-9f6e-60da9789c415 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.727407] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674a9217-cff2-4ad8-a8a8-e171329dca6d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.742534] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eead84-773e-4569-946a-b4d07de6e3b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.759020] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.768589] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.768846] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.769013] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.769203] 
env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.769339] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.769676] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.769676] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.769838] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.770235] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.770235] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.770329] env[61974]: DEBUG nova.virt.hardware [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.771181] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1526d4fe-3504-409d-8c6d-24e7aec18b39 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.779241] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bb0262-8461-46b9-8a14-ad0af1fcc8d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.793339] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
797.799483] env[61974]: DEBUG oslo.service.loopingcall [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.799921] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.800150] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-465da746-0a7d-4c71-82de-89ddfc4237da {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.816453] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.816453] env[61974]: value = "task-1378971" [ 797.816453] env[61974]: _type = "Task" [ 797.816453] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.821622] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 797.828621] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378971, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.830237] env[61974]: ERROR nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
[ 797.830237] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.830237] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.830237] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.830237] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.830237] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.830237] env[61974]: ERROR nova.compute.manager raise self.value [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.830237] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 797.830237] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.830237] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 797.830821] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.830821] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 797.830821] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
[ 797.830821] env[61974]: ERROR nova.compute.manager [ 797.830821] env[61974]: Traceback (most recent call last): [ 797.830821] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 797.830821] env[61974]: listener.cb(fileno) [ 797.830821] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.830821] env[61974]: result = function(*args, **kwargs) [ 797.830821] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.830821] env[61974]: return func(*args, **kwargs) [ 797.830821] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 797.830821] env[61974]: raise e [ 797.830821] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.830821] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 797.830821] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.830821] env[61974]: created_port_ids = self._update_ports_for_instance( [ 797.830821] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.830821] env[61974]: with excutils.save_and_reraise_exception(): [ 797.830821] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.830821] env[61974]: self.force_reraise() [ 797.830821] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.830821] env[61974]: raise self.value [ 797.830821] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.830821] env[61974]: updated_port = self._update_port( [ 797.830821] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.830821] env[61974]: _ensure_no_port_binding_failure(port) [ 797.830821] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 797.830821] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 797.831847] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
[ 797.831847] env[61974]: Removing descriptor: 21 [ 797.841467] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.841694] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.841881] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.842040] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.842204] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.842349] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.842581] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.842784] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.842999] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 
tempest-ServerTagsTestJSON-1461578565-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.843226] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.843406] env[61974]: DEBUG nova.virt.hardware [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.844222] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b04cd2-b9a8-4854-b4a6-1c89843d7f61 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.851255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8f1859-de5f-4f8e-8600-005b8fe53b3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.865556] env[61974]: ERROR nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
[ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Traceback (most recent call last): [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] yield resources [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.driver.spawn(context, instance, image_meta, [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] vm_ref = self.build_virtual_machine(instance, [ 797.865556] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] vif_infos = vmwarevif.get_vif_info(self._session, [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] for vif in network_info: [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return self._sync_wrapper(fn, *args, **kwargs) [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.wait() [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self[:] = self._gt.wait() [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return self._exit_event.wait() [ 797.866239] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 797.866239] env[61974]: ERROR 
nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] current.throw(*self._exc) [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] result = function(*args, **kwargs) [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return func(*args, **kwargs) [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise e [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] nwinfo = self.network_api.allocate_for_instance( [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] created_port_ids = self._update_ports_for_instance( [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] with excutils.save_and_reraise_exception(): [ 797.866903] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.force_reraise() [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise self.value [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] updated_port = self._update_port( [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] _ensure_no_port_binding_failure(port) [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise exception.PortBindingFailed(port_id=port['id']) [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. [ 797.867426] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] [ 797.867426] env[61974]: INFO nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Terminating instance [ 797.867933] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquiring lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.010590] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.103393] env[61974]: DEBUG nova.network.neutron [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.204168] env[61974]: DEBUG nova.network.neutron [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.245131] env[61974]: INFO nova.compute.manager [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Took 24.07 seconds to build instance. 
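The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): Nova asked Neutron to bind port 9d2964d9-0189-4d44-86a8-494c91ea9068, Neutron reported the binding as failed, and Nova raised PortBindingFailed, which aborts the spawn and leads into the "Terminating instance" / refresh_cache lock sequence that follows. A minimal, self-contained sketch of that style of check is shown here, assuming the conventional 'binding:vif_type' == 'binding_failed' marker that Neutron sets on ports it could not bind; this is illustrative only, not Nova's exact implementation.

    # Illustrative sketch, not Nova's actual code: raise when Neutron marks a
    # port binding as failed (binding:vif_type == 'binding_failed').
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the traceback above, as Neutron would return it after a
    # failed binding attempt.
    failed_port = {'id': '9d2964d9-0189-4d44-86a8-494c91ea9068',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port 9d2964d9-..., please check neutron logs ...

Because network allocation runs asynchronously, the exception only surfaces when the spawn path iterates network_info (the _sync_wrapper/wait frames in the traceback), after which the instance is torn down and its network deallocated, as the subsequent entries show.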
[ 798.299930] env[61974]: DEBUG nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 798.300233] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 71 to 72 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 798.300415] env[61974]: DEBUG nova.compute.provider_tree [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.328567] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378971, 'name': CreateVM_Task, 'duration_secs': 0.350627} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.328806] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.329302] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.329685] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.330037] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 798.330299] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20150e90-6e55-494f-b615-fabda6f35099 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.335249] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 798.335249] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528c4b2c-68c9-a98f-2f4d-4146f3cf1736" [ 798.335249] env[61974]: _type = "Task" [ 798.335249] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.344725] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528c4b2c-68c9-a98f-2f4d-4146f3cf1736, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.709230] env[61974]: DEBUG oslo_concurrency.lockutils [req-8f31205b-5ad9-4d71-ac74-080c5068860b req-29f6b8c2-1a85-413a-b72a-78ca07204531 service nova] Releasing lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.709762] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquired lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.709975] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.747802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-849994f2-3598-40c0-918f-2c1cca505818 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.127s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.805906] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.012s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.806543] env[61974]: ERROR nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. 
[ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Traceback (most recent call last): [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.driver.spawn(context, instance, image_meta, [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] vm_ref = self.build_virtual_machine(instance, [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 798.806543] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] for vif in network_info: [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return self._sync_wrapper(fn, *args, **kwargs) [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.wait() [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self[:] = self._gt.wait() [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return self._exit_event.wait() [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] current.throw(*self._exc) [ 798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
798.806989] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] result = function(*args, **kwargs) [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] return func(*args, **kwargs) [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise e [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] nwinfo = self.network_api.allocate_for_instance( [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] created_port_ids = self._update_ports_for_instance( [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] with excutils.save_and_reraise_exception(): [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] self.force_reraise() [ 798.807420] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise self.value [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] updated_port = self._update_port( [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] _ensure_no_port_binding_failure(port) [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] raise exception.PortBindingFailed(port_id=port['id']) [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] nova.exception.PortBindingFailed: Binding failed for 
port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. [ 798.807852] env[61974]: ERROR nova.compute.manager [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] [ 798.807852] env[61974]: DEBUG nova.compute.utils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 798.808522] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.820s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.810477] env[61974]: INFO nova.compute.claims [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.813205] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Build of instance 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb was re-scheduled: Binding failed for port 4b51d819-b080-4a77-aeb5-a352cd3ff6c4, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 798.813675] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 798.813907] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.814057] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.814214] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.851759] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528c4b2c-68c9-a98f-2f4d-4146f3cf1736, 'name': SearchDatastore_Task, 'duration_secs': 0.011496} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.851759] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.851759] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.851759] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.852014] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.852014] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.852014] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c601f86-77ba-4223-a0d2-77117f06cece {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.864745] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.864745] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.864745] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e9003c-ab7b-4676-9f78-3f47fa26b962 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.869886] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 798.869886] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52cd396d-febb-51c9-ebc1-baafbc130493" [ 798.869886] env[61974]: _type = "Task" [ 798.869886] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.880739] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cd396d-febb-51c9-ebc1-baafbc130493, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.062314] env[61974]: DEBUG nova.compute.manager [None req-88a5fe26-3de9-4690-a306-ecd1955d5222 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 799.063358] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95ba6f8-efeb-4595-a43e-0eeee27bc7ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.148543] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.148889] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.149419] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "59b1ad04-c949-4b07-af77-f84f842dd9ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.149781] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.150090] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.154815] env[61974]: INFO nova.compute.manager [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Terminating instance [ 799.157646] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "refresh_cache-59b1ad04-c949-4b07-af77-f84f842dd9ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.157803] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquired lock "refresh_cache-59b1ad04-c949-4b07-af77-f84f842dd9ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.158753] env[61974]: DEBUG nova.network.neutron [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.234528] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.250328] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 799.345044] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.381675] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cd396d-febb-51c9-ebc1-baafbc130493, 'name': SearchDatastore_Task, 'duration_secs': 0.009252} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.382657] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63188aac-280c-4784-b806-4fa72509aab0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.388147] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 799.388147] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d11d8f-b4e6-ea0d-a94e-192617db2282" [ 799.388147] env[61974]: _type = "Task" [ 799.388147] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.396126] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d11d8f-b4e6-ea0d-a94e-192617db2282, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.410320] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.514633] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.573744] env[61974]: INFO nova.compute.manager [None req-88a5fe26-3de9-4690-a306-ecd1955d5222 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] instance snapshotting [ 799.574580] env[61974]: DEBUG nova.objects.instance [None req-88a5fe26-3de9-4690-a306-ecd1955d5222 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lazy-loading 'flavor' on Instance uuid 59b1ad04-c949-4b07-af77-f84f842dd9ee {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.610204] env[61974]: DEBUG nova.compute.manager [req-26ee0c0a-4bc1-4cd0-8a32-0e20f1d773ea req-9a9e7beb-f933-42a3-b2b2-2824f8a0f70f service nova] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Received event network-vif-deleted-9d2964d9-0189-4d44-86a8-494c91ea9068 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.690108] env[61974]: DEBUG nova.network.neutron [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.756589] env[61974]: DEBUG nova.network.neutron [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.779106] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.900359] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d11d8f-b4e6-ea0d-a94e-192617db2282, 'name': SearchDatastore_Task, 'duration_secs': 0.037926} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.901654] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.901654] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.901654] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efb66c5d-5dca-4529-8283-5d0c9e5b2d88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.907998] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 799.907998] env[61974]: value = "task-1378972" [ 799.907998] env[61974]: _type = "Task" [ 799.907998] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.913692] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Releasing lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.914107] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 799.914305] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.920018] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b821215-6f34-4852-8db6-d019181bf03d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.921135] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.927132] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fa8807-a612-4ca7-9fa7-11dac60cf9c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.952964] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b74ee60-ce70-429a-9ccb-1f96c236cf8c could not be found. [ 799.954423] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 799.954423] env[61974]: INFO nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 799.954423] env[61974]: DEBUG oslo.service.loopingcall [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.956362] env[61974]: DEBUG nova.compute.manager [-] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 799.956466] env[61974]: DEBUG nova.network.neutron [-] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 799.972377] env[61974]: DEBUG nova.network.neutron [-] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.017336] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.017606] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 800.017825] env[61974]: DEBUG nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 800.017959] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.036309] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.080401] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fd6d31-d4bc-414f-ac45-73b8ff6648f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.099572] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67da2f15-0999-4e50-a6c0-62f99dfd57dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.207607] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a872d6-b616-4b93-a415-67e76af8d922 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.216288] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00038b1c-54d7-4364-9195-2082fe2f58dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.251747] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6192ec-b874-4a7f-a53e-ed9929effb7a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.262084] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Releasing lock "refresh_cache-59b1ad04-c949-4b07-af77-f84f842dd9ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.262530] env[61974]: DEBUG nova.compute.manager [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 800.262741] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 800.263680] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ade7df-be0c-4f8a-b5b5-0fef7ca89975 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.268086] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3a07e8-24ee-469d-bbce-0d8f80d618ce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.283510] env[61974]: DEBUG nova.compute.provider_tree [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.287021] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.287620] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52f0d322-fb8c-4aa5-b228-52c05f1f59ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.295511] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 800.295511] env[61974]: value = "task-1378973" [ 800.295511] env[61974]: _type = "Task" [ 800.295511] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.308960] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.425402] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378972, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.475053] env[61974]: DEBUG nova.network.neutron [-] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.539056] env[61974]: DEBUG nova.network.neutron [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.617801] env[61974]: DEBUG nova.compute.manager [None req-88a5fe26-3de9-4690-a306-ecd1955d5222 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance disappeared during snapshot {{(pid=61974) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 800.769510] env[61974]: DEBUG nova.compute.manager [None req-88a5fe26-3de9-4690-a306-ecd1955d5222 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Found 0 images (rotation: 2) {{(pid=61974) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 800.789174] env[61974]: DEBUG nova.scheduler.client.report [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.806840] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378973, 'name': PowerOffVM_Task, 'duration_secs': 0.197596} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.807677] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.807802] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 800.808625] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25e235dd-0cc2-4823-ad38-d22ab39a61ee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.837020] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 800.837020] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 800.837020] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Deleting the datastore file [datastore2] 59b1ad04-c949-4b07-af77-f84f842dd9ee {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 800.837020] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af4f73cd-f082-4379-bd8b-240ca3662da0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.845257] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for the task: (returnval){ [ 800.845257] env[61974]: value = "task-1378975" [ 800.845257] env[61974]: _type = "Task" [ 800.845257] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.852288] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378975, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.919269] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661071} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.919567] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.919821] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.920088] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91dd25fb-bd12-4917-a761-03fa92022258 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.928825] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 800.928825] env[61974]: value = "task-1378976" [ 800.928825] env[61974]: _type = "Task" [ 800.928825] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.937940] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.978380] env[61974]: INFO nova.compute.manager [-] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Took 1.02 seconds to deallocate network for instance. 
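The surrounding entries repeat one pattern: a long-running vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, later PowerOnVM_Task) is submitted, the caller logs "Waiting for the task", progress is polled, and a final line with the measured duration_secs reports "completed successfully". The short sketch below illustrates that poll-until-done loop in isolation; the FakeTask class, wait_for_task helper, and the fixed poll interval are assumptions made for this example and are not the oslo.vmware implementation that produced these log lines.

import time


class FakeTask:
    # Toy stand-in for a long-running backend task (illustrative only).
    def __init__(self, task_id, name, duration=0.3):
        self.task_id = task_id
        self.name = name
        self._start = time.monotonic()
        self._duration = duration

    def progress(self):
        # Report 0-100% based on elapsed time; a real driver would query the server.
        elapsed = time.monotonic() - self._start
        return min(int(100 * elapsed / self._duration), 100)


def wait_for_task(task, poll_interval=0.05):
    # Poll the task until it reports 100%, logging progress like the entries above.
    start = time.monotonic()
    while True:
        pct = task.progress()
        print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}}} progress is {pct}%.")
        if pct >= 100:
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task.task_id!r}, 'name': {task.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1378976", "ExtendVirtualDisk_Task"))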
[ 800.980845] env[61974]: DEBUG nova.compute.claims [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 800.981011] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.042058] env[61974]: INFO nova.compute.manager [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb] Took 1.02 seconds to deallocate network for instance. [ 801.294221] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.294737] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 801.300903] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.446s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.303180] env[61974]: INFO nova.compute.claims [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.355724] env[61974]: DEBUG oslo_vmware.api [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Task: {'id': task-1378975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100827} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.355979] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.356178] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.356356] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.356527] env[61974]: INFO nova.compute.manager [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Took 1.09 seconds to destroy the instance on the hypervisor. [ 801.356764] env[61974]: DEBUG oslo.service.loopingcall [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.356944] env[61974]: DEBUG nova.compute.manager [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 801.357161] env[61974]: DEBUG nova.network.neutron [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.390235] env[61974]: DEBUG nova.network.neutron [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.439990] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104934} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.440275] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.441086] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9bac3c-25b1-46d9-b247-ac66664dbaeb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.461984] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.462318] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e18a663d-6a58-41fd-b376-88eb647da941 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.485197] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 801.485197] env[61974]: value = "task-1378977" [ 801.485197] env[61974]: _type = "Task" [ 801.485197] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.493746] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378977, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.807877] env[61974]: DEBUG nova.compute.utils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.811433] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 801.811694] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 801.868810] env[61974]: DEBUG nova.policy [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '363828c32a204f83b05e8492eb177098', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7779372a20e04d10b28d4ff9b784b689', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 801.894018] env[61974]: DEBUG nova.network.neutron [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.995690] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378977, 'name': ReconfigVM_Task, 'duration_secs': 0.318788} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.996338] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Reconfigured VM instance instance-0000002a to attach disk [datastore1] f6b76518-d691-4e4f-861a-624a1684e564/f6b76518-d691-4e4f-861a-624a1684e564.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.996977] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e1ac039-50a0-4891-8141-aec5a1943ee1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.006580] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 802.006580] env[61974]: value = "task-1378978" [ 802.006580] env[61974]: _type = "Task" [ 802.006580] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.016907] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378978, 'name': Rename_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.083387] env[61974]: INFO nova.scheduler.client.report [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted allocations for instance 955ed729-6cbc-4a7a-9abf-c1078cd4ddbb [ 802.312480] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 802.396298] env[61974]: INFO nova.compute.manager [-] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Took 1.04 seconds to deallocate network for instance. [ 802.516522] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378978, 'name': Rename_Task, 'duration_secs': 0.122147} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.519296] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.520745] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27038fe1-4d2f-46ca-9317-8f98cbfd8ec9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.530543] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 802.530543] env[61974]: value = "task-1378979" [ 802.530543] env[61974]: _type = "Task" [ 802.530543] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.543269] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378979, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.562073] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Successfully created port: 17cefec7-e4b4-4279-9a97-d86484b6c3b2 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.595295] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52699d1d-89b1-471e-9eda-0ed0538cfa84 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "955ed729-6cbc-4a7a-9abf-c1078cd4ddbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 147.508s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.665959] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e83f84-6022-477c-b313-734aa8b6289b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.674739] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bc6573-eda3-4482-9e41-bef690ea7c70 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.714433] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865e3ec7-9bd2-4bfc-842d-e95eb3a49af9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.723430] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6ac396-4649-419a-bf9a-2123c22c2aeb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.741937] env[61974]: DEBUG nova.compute.provider_tree [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.901738] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.051664] env[61974]: DEBUG oslo_vmware.api [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378979, 'name': PowerOnVM_Task, 'duration_secs': 0.413265} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.052609] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.052609] env[61974]: DEBUG nova.compute.manager [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 803.053263] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78f2382-7cc4-41f3-b659-e166af61d749 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.098554] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 803.299447] env[61974]: DEBUG nova.scheduler.client.report [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 803.299572] env[61974]: DEBUG nova.compute.provider_tree [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 72 to 73 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 803.299754] env[61974]: DEBUG nova.compute.provider_tree [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 803.327237] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 
tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 803.354678] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 803.354949] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.355090] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 803.355446] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.355446] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 803.355599] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 803.355778] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 803.355934] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 
tempest-AttachVolumeShelveTestJSON-180222380-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 803.356122] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 803.356287] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 803.356457] env[61974]: DEBUG nova.virt.hardware [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 803.357923] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b259c308-9e91-4d4f-9152-7536bd514c13 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.366048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30d05bd-4f51-4564-9639-5759538f79e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.578998] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.633493] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.804618] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.805618] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 803.808312] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.973s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.812049] env[61974]: INFO nova.compute.claims [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.021354] env[61974]: DEBUG nova.compute.manager [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Received event network-changed-17cefec7-e4b4-4279-9a97-d86484b6c3b2 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.021604] env[61974]: DEBUG nova.compute.manager [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Refreshing instance network info cache due to event network-changed-17cefec7-e4b4-4279-9a97-d86484b6c3b2. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 804.022172] env[61974]: DEBUG oslo_concurrency.lockutils [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] Acquiring lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.022989] env[61974]: DEBUG oslo_concurrency.lockutils [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] Acquired lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.022989] env[61974]: DEBUG nova.network.neutron [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Refreshing network info cache for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.105411] env[61974]: ERROR nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. 
[ 804.105411] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.105411] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.105411] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.105411] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.105411] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.105411] env[61974]: ERROR nova.compute.manager raise self.value [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.105411] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.105411] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.105411] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.105943] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.105943] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.105943] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. 
[ 804.105943] env[61974]: ERROR nova.compute.manager [ 804.105943] env[61974]: Traceback (most recent call last): [ 804.105943] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.105943] env[61974]: listener.cb(fileno) [ 804.105943] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.105943] env[61974]: result = function(*args, **kwargs) [ 804.105943] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.105943] env[61974]: return func(*args, **kwargs) [ 804.105943] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.105943] env[61974]: raise e [ 804.105943] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.105943] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 804.105943] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.105943] env[61974]: created_port_ids = self._update_ports_for_instance( [ 804.105943] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.105943] env[61974]: with excutils.save_and_reraise_exception(): [ 804.105943] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.105943] env[61974]: self.force_reraise() [ 804.105943] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.105943] env[61974]: raise self.value [ 804.105943] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.105943] env[61974]: updated_port = self._update_port( [ 804.105943] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.105943] env[61974]: _ensure_no_port_binding_failure(port) [ 804.105943] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.105943] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.106993] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. [ 804.106993] env[61974]: Removing descriptor: 20 [ 804.106993] env[61974]: ERROR nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. 
[ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Traceback (most recent call last): [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] yield resources [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.driver.spawn(context, instance, image_meta, [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.106993] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] vm_ref = self.build_virtual_machine(instance, [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] for vif in network_info: [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self._sync_wrapper(fn, *args, **kwargs) [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.wait() [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self[:] = self._gt.wait() [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self._exit_event.wait() [ 804.107436] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.107870] env[61974]: ERROR 
nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] result = hub.switch() [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self.greenlet.switch() [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] result = function(*args, **kwargs) [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return func(*args, **kwargs) [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise e [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] nwinfo = self.network_api.allocate_for_instance( [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.107870] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] created_port_ids = self._update_ports_for_instance( [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] with excutils.save_and_reraise_exception(): [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.force_reraise() [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise self.value [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] updated_port = self._update_port( [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.108319] 
env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] _ensure_no_port_binding_failure(port) [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.108319] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise exception.PortBindingFailed(port_id=port['id']) [ 804.108711] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. [ 804.108711] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] [ 804.108711] env[61974]: INFO nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Terminating instance [ 804.108820] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.315193] env[61974]: DEBUG nova.compute.utils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 804.323022] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 804.323022] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.383944] env[61974]: DEBUG nova.policy [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 804.410375] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "635f362a-582e-44bc-85d8-8a69943982b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.410599] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.543594] env[61974]: DEBUG nova.network.neutron [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.654019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "f6b76518-d691-4e4f-861a-624a1684e564" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.654019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.654019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "f6b76518-d691-4e4f-861a-624a1684e564-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.654019] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.654255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.654898] env[61974]: INFO nova.compute.manager [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Terminating instance [ 804.656815] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.657110] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquired lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.657492] env[61974]: DEBUG nova.network.neutron [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: 
f6b76518-d691-4e4f-861a-624a1684e564] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.671569] env[61974]: DEBUG nova.network.neutron [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.821815] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 804.954851] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Successfully created port: fc4573ce-7909-401d-89b0-9dadf3f5e629 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 805.174441] env[61974]: DEBUG oslo_concurrency.lockutils [req-0ec0b6e5-00cf-46f9-8559-735ad97c5348 req-5efa7ce5-b9dd-4337-b085-09464b7c574c service nova] Releasing lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.174842] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquired lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.175037] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.182933] env[61974]: DEBUG nova.network.neutron [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.219118] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71e61b4-9f22-49fb-a40e-d474da476fb4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.228681] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d4a160-2a04-48b2-a8cd-134e843c29cd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.264701] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3005bd6-04a6-473e-8cf9-d8e949e45686 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.272465] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a4f3af-db51-4308-8c0f-7d0d728cb70d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.286387] env[61974]: DEBUG nova.compute.provider_tree [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.288413] env[61974]: DEBUG nova.network.neutron [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.709014] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.793456] env[61974]: DEBUG nova.scheduler.client.report [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.794928] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Releasing lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.795453] env[61974]: DEBUG nova.compute.manager [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 805.795807] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 805.796743] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de60060-9fa0-4fe8-9e7e-6d598a121fe0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.806354] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 805.807015] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ee45791-6846-49b1-a3ca-c279705b1607 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.819224] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.819224] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.819224] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 805.819224] env[61974]: value = "task-1378980" [ 805.819224] env[61974]: _type = "Task" [ 805.819224] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.828124] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.828913] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.838549] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 805.866512] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.866749] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.866901] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.867089] env[61974]: DEBUG nova.virt.hardware [None 
req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.867235] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.867430] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.867706] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.867804] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.867964] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.868138] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.868298] env[61974]: DEBUG nova.virt.hardware [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.869149] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013c6466-1f09-4871-a015-a8cdaf2f7faf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.876853] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0c0006-37c8-4fd5-af3f-253745ada017 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.047617] env[61974]: DEBUG nova.compute.manager [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Received event network-vif-deleted-17cefec7-e4b4-4279-9a97-d86484b6c3b2 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} 
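The "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" burst above boils down to enumerating (sockets, cores, threads) triples whose product equals the flavor's vCPU count, bounded by the 65536 maxima shown in the limits lines. A minimal Python sketch of that enumeration follows; it is a simplified illustration of the idea behind nova/virt/hardware.py, not the actual Nova implementation, and the namedtuple is only a stand-in for Nova's VirtCPUTopology object.

from collections import namedtuple

# Stand-in for the Nova versioned object of the same name (illustration only).
VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) combination within the limits
    # whose product equals the requested vCPU count.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]

For the 1-vCPU m1.nano flavor the only solution is 1:1:1, matching the single possible topology the log reports.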
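Likewise, the earlier scheduler report for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a can be read with the usual placement arithmetic: schedulable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may request. A short sketch using the exact values from the log (illustrative only; min_unit and step_size are omitted for brevity, and this is not placement or Nova code):

# Inventory values copied from the "Inventory has not changed for provider" record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 178,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Capacity visible to the scheduler; max_unit separately bounds how much of
    # this resource class one allocation (one instance) may consume.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, per-instance max={inv['max_unit']}")

# VCPU: capacity=192, per-instance max=16
# MEMORY_MB: capacity=196078, per-instance max=65530
# DISK_GB: capacity=400, per-instance max=178

The m1.nano requests in this run (1 vCPU, 192 MB RAM, 1 GB root disk) fall well inside these per-instance limits, which is why the claims succeed even though the boot later fails on port binding.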
[ 806.047831] env[61974]: DEBUG nova.compute.manager [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Received event network-changed-fc4573ce-7909-401d-89b0-9dadf3f5e629 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 806.047992] env[61974]: DEBUG nova.compute.manager [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Refreshing instance network info cache due to event network-changed-fc4573ce-7909-401d-89b0-9dadf3f5e629. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 806.048217] env[61974]: DEBUG oslo_concurrency.lockutils [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] Acquiring lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.048384] env[61974]: DEBUG oslo_concurrency.lockutils [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] Acquired lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.048624] env[61974]: DEBUG nova.network.neutron [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Refreshing network info cache for port fc4573ce-7909-401d-89b0-9dadf3f5e629 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.134133] env[61974]: ERROR nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. 
[ 806.134133] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.134133] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.134133] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.134133] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.134133] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.134133] env[61974]: ERROR nova.compute.manager raise self.value [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.134133] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 806.134133] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.134133] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 806.134593] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.134593] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 806.134593] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. 
[ 806.134593] env[61974]: ERROR nova.compute.manager [ 806.134593] env[61974]: Traceback (most recent call last): [ 806.134593] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 806.134593] env[61974]: listener.cb(fileno) [ 806.134593] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.134593] env[61974]: result = function(*args, **kwargs) [ 806.134593] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.134593] env[61974]: return func(*args, **kwargs) [ 806.134593] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.134593] env[61974]: raise e [ 806.134593] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.134593] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 806.134593] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.134593] env[61974]: created_port_ids = self._update_ports_for_instance( [ 806.134593] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.134593] env[61974]: with excutils.save_and_reraise_exception(): [ 806.134593] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.134593] env[61974]: self.force_reraise() [ 806.134593] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.134593] env[61974]: raise self.value [ 806.134593] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.134593] env[61974]: updated_port = self._update_port( [ 806.134593] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.134593] env[61974]: _ensure_no_port_binding_failure(port) [ 806.134593] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.134593] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 806.135431] env[61974]: nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. [ 806.135431] env[61974]: Removing descriptor: 20 [ 806.135431] env[61974]: ERROR nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. 
[ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Traceback (most recent call last): [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] yield resources [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.driver.spawn(context, instance, image_meta, [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.135431] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] vm_ref = self.build_virtual_machine(instance, [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] for vif in network_info: [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self._sync_wrapper(fn, *args, **kwargs) [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.wait() [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self[:] = self._gt.wait() [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self._exit_event.wait() [ 806.135861] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 806.136253] env[61974]: ERROR 
nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] result = hub.switch() [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self.greenlet.switch() [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] result = function(*args, **kwargs) [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return func(*args, **kwargs) [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise e [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] nwinfo = self.network_api.allocate_for_instance( [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.136253] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] created_port_ids = self._update_ports_for_instance( [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] with excutils.save_and_reraise_exception(): [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.force_reraise() [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise self.value [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] updated_port = self._update_port( [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.136670] 
env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] _ensure_no_port_binding_failure(port) [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.136670] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise exception.PortBindingFailed(port_id=port['id']) [ 806.136999] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. [ 806.136999] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] [ 806.136999] env[61974]: INFO nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Terminating instance [ 806.138618] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.301917] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.302439] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 806.304946] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.298s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.329214] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378980, 'name': PowerOffVM_Task, 'duration_secs': 0.14005} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.329419] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 806.329581] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 806.329890] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d951dc0e-7769-4e9c-b8df-ea79ae072b19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.331567] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Releasing lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.331944] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.332142] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.332377] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a73e80da-32ab-437b-875f-1452a827833d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.343022] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc541fbe-9622-47d8-8a15-24fdc43e3d6c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.356712] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 806.356926] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 806.357115] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 
tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Deleting the datastore file [datastore1] f6b76518-d691-4e4f-861a-624a1684e564 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 806.357377] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55e98226-bfea-4f33-9a7c-0a3bb5b9bdbe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.363090] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for the task: (returnval){ [ 806.363090] env[61974]: value = "task-1378982" [ 806.363090] env[61974]: _type = "Task" [ 806.363090] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.367409] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b935b7e2-ba4b-452a-9eca-2fad5acc9055 could not be found. [ 806.367621] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.367802] env[61974]: INFO nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Took 0.04 seconds to destroy the instance on the hypervisor. [ 806.368042] env[61974]: DEBUG oslo.service.loopingcall [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.370836] env[61974]: DEBUG nova.compute.manager [-] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.370937] env[61974]: DEBUG nova.network.neutron [-] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.376828] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378982, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.386018] env[61974]: DEBUG nova.network.neutron [-] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.579430] env[61974]: DEBUG nova.network.neutron [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.684477] env[61974]: DEBUG nova.network.neutron [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.809388] env[61974]: DEBUG nova.compute.utils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.814634] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 806.814810] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.852829] env[61974]: DEBUG nova.policy [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d6b2b442f324db1953ff5602a8865dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '88d7eaa5f4574a67a038ce9e91650338', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 806.876542] env[61974]: DEBUG oslo_vmware.api [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Task: {'id': task-1378982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090909} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.876795] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 806.876974] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 806.877239] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.877464] env[61974]: INFO nova.compute.manager [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Took 1.08 seconds to destroy the instance on the hypervisor. [ 806.877710] env[61974]: DEBUG oslo.service.loopingcall [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.877894] env[61974]: DEBUG nova.compute.manager [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.877988] env[61974]: DEBUG nova.network.neutron [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.888074] env[61974]: DEBUG nova.network.neutron [-] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.896342] env[61974]: DEBUG nova.network.neutron [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.125950] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Successfully created port: 034b3dca-49d9-4087-8e57-f11ab62a950b {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.188371] env[61974]: DEBUG oslo_concurrency.lockutils [req-34d50892-0853-481b-991b-35b94b3a4560 req-f04b724f-4886-44df-9f1f-52adca262fbf service nova] Releasing lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.188830] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.189030] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.315749] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 807.345899] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 30455d07-4826-4561-a04f-1b4a2041402c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.346065] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f6b76518-d691-4e4f-861a-624a1684e564 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.346215] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59b1ad04-c949-4b07-af77-f84f842dd9ee is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 807.346336] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2b74ee60-ce70-429a-9ccb-1f96c236cf8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.346452] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b935b7e2-ba4b-452a-9eca-2fad5acc9055 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.346568] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4628f895-1ae5-4d25-8095-f892b86769f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.346737] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 90fd5720-923c-4243-9f62-908e35fe35a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 807.391150] env[61974]: INFO nova.compute.manager [-] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Took 1.02 seconds to deallocate network for instance. [ 807.394937] env[61974]: DEBUG nova.compute.claims [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.395137] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.398844] env[61974]: DEBUG nova.network.neutron [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.717776] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.855201] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e6bc38d5-056f-40c2-a2ed-467200da2738 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.868158] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.902466] env[61974]: INFO nova.compute.manager [-] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Took 1.02 seconds to deallocate network for instance. [ 808.080754] env[61974]: DEBUG nova.compute.manager [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Received event network-vif-deleted-fc4573ce-7909-401d-89b0-9dadf3f5e629 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.081368] env[61974]: DEBUG nova.compute.manager [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Received event network-changed-034b3dca-49d9-4087-8e57-f11ab62a950b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.081475] env[61974]: DEBUG nova.compute.manager [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Refreshing instance network info cache due to event network-changed-034b3dca-49d9-4087-8e57-f11ab62a950b. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 808.081821] env[61974]: DEBUG oslo_concurrency.lockutils [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] Acquiring lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.082250] env[61974]: DEBUG oslo_concurrency.lockutils [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] Acquired lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.082405] env[61974]: DEBUG nova.network.neutron [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Refreshing network info cache for port 034b3dca-49d9-4087-8e57-f11ab62a950b {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.155781] env[61974]: ERROR nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. 
[ 808.155781] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 808.155781] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 808.155781] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 808.155781] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.155781] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.155781] env[61974]: ERROR nova.compute.manager raise self.value [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 808.155781] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 808.155781] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.155781] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 808.156278] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.156278] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 808.156278] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. 
[ 808.156278] env[61974]: ERROR nova.compute.manager [ 808.156278] env[61974]: Traceback (most recent call last): [ 808.156278] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 808.156278] env[61974]: listener.cb(fileno) [ 808.156278] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 808.156278] env[61974]: result = function(*args, **kwargs) [ 808.156278] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 808.156278] env[61974]: return func(*args, **kwargs) [ 808.156278] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 808.156278] env[61974]: raise e [ 808.156278] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 808.156278] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 808.156278] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 808.156278] env[61974]: created_port_ids = self._update_ports_for_instance( [ 808.156278] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 808.156278] env[61974]: with excutils.save_and_reraise_exception(): [ 808.156278] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.156278] env[61974]: self.force_reraise() [ 808.156278] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.156278] env[61974]: raise self.value [ 808.156278] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 808.156278] env[61974]: updated_port = self._update_port( [ 808.156278] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.156278] env[61974]: _ensure_no_port_binding_failure(port) [ 808.156278] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.156278] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 808.157177] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. [ 808.157177] env[61974]: Removing descriptor: 20 [ 808.328199] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 808.353160] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.353414] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.353572] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.353751] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.353896] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.354061] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.354273] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.354430] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.354596] env[61974]: DEBUG 
nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.354752] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.354922] env[61974]: DEBUG nova.virt.hardware [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.356151] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b549e78-9ab0-48b8-9b7e-5aa5dab39fd8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.363865] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 2601b97a-8ef6-4b61-b0e0-dd6c7c203206 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.366048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e86b0ed-57da-40be-80f5-9584728e57b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.380599] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.380984] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 808.381189] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.382017] env[61974]: ERROR nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Traceback (most recent call last): [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] yield resources [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.driver.spawn(context, instance, image_meta, [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] vm_ref = self.build_virtual_machine(instance, [ 808.382017] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] vif_infos = vmwarevif.get_vif_info(self._session, [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] for vif in network_info: [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return self._sync_wrapper(fn, *args, **kwargs) [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.wait() [ 808.382425] 
env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self[:] = self._gt.wait() [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return self._exit_event.wait() [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 808.382425] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] current.throw(*self._exc) [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] result = function(*args, **kwargs) [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return func(*args, **kwargs) [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise e [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] nwinfo = self.network_api.allocate_for_instance( [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] created_port_ids = self._update_ports_for_instance( [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] with excutils.save_and_reraise_exception(): [ 808.382847] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.force_reraise() [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise self.value [ 
808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] updated_port = self._update_port( [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] _ensure_no_port_binding_failure(port) [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise exception.PortBindingFailed(port_id=port['id']) [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. [ 808.383351] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] [ 808.383351] env[61974]: INFO nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Terminating instance [ 808.384184] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b53b3085-a00f-41b6-bf17-14f0b47cc691 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.386114] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.393531] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e732b54-1fe7-4136-a106-80409c7867dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.408052] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.415993] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4628f895-1ae5-4d25-8095-f892b86769f5 could not be found. 
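[editor's note] Both tracebacks above bottom out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports the port's binding as failed. The following is a minimal, hedged sketch of that check, not Nova's exact code: the dict-shaped port, the 'binding:vif_type' key and the 'binding_failed' sentinel follow Neutron's port-binding extension conventions and are assumptions here; only the exception message format is taken from the log itself.

    # Hedged sketch of the check exercised by the tracebacks above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise PortBindingFailed if Neutron marked the port binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port id seen in the log above:
    port = {'id': '034b3dca-49d9-4087-8e57-f11ab62a950b',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Because the failure surfaces inside the asynchronous network-allocation greenthread, the compute manager re-raises it when the spawn path iterates the network_info, which is why the same traceback appears twice: once from the greenthread and once from _build_resources.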
[ 808.416200] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.416382] env[61974]: INFO nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 808.416627] env[61974]: DEBUG oslo.service.loopingcall [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.416851] env[61974]: DEBUG nova.compute.manager [-] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.416945] env[61974]: DEBUG nova.network.neutron [-] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.435152] env[61974]: DEBUG nova.network.neutron [-] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.600883] env[61974]: DEBUG nova.network.neutron [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.853026] env[61974]: DEBUG nova.network.neutron [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.870537] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 242d6159-5223-4815-900c-4c1285c7a90c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.938375] env[61974]: DEBUG nova.network.neutron [-] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.356409] env[61974]: DEBUG oslo_concurrency.lockutils [req-beed8ae8-a311-4a4e-9231-3779bba63dbe req-3ef81561-8ea9-4b8e-922c-e320ca934a6f service nova] Releasing lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.356818] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquired lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.357151] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.373077] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f290da20-8a42-42f5-8902-136e434d29d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.441171] env[61974]: INFO nova.compute.manager [-] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Took 1.02 seconds to deallocate network for instance. [ 809.443764] env[61974]: DEBUG nova.compute.claims [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 809.443949] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.875576] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0de509f4-48d8-43ae-9551-80ae414d7c8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.884719] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.966372] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.101885] env[61974]: DEBUG nova.compute.manager [req-37fc9da7-4e42-4840-b030-3b5adfd6bca9 req-740efcfd-0fc8-4275-843a-d360eb829950 service nova] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Received event network-vif-deleted-034b3dca-49d9-4087-8e57-f11ab62a950b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 810.379213] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 5f62ae7a-126f-42ce-9579-57ca02c871d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.469145] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Releasing lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.469639] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 810.469936] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.470241] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ebbfa02-e745-4b72-b06a-d2f62f68fcbe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.479654] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbaa61eb-94ec-482e-88ec-0c972e9797c3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.502630] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 90fd5720-923c-4243-9f62-908e35fe35a6 could not be found. [ 810.502850] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.503041] env[61974]: INFO nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 810.503290] env[61974]: DEBUG oslo.service.loopingcall [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.503498] env[61974]: DEBUG nova.compute.manager [-] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.503596] env[61974]: DEBUG nova.network.neutron [-] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.517154] env[61974]: DEBUG nova.network.neutron [-] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.882330] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4545e438-8784-4911-bf2e-8eb14d38c308 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.019460] env[61974]: DEBUG nova.network.neutron [-] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.386061] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e9309651-2fcb-40ad-babb-950042fe68f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.522094] env[61974]: INFO nova.compute.manager [-] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Took 1.02 seconds to deallocate network for instance. [ 811.525077] env[61974]: DEBUG nova.compute.claims [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 811.525260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.889041] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b2d442b2-1927-481c-a232-8514444004a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.393710] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance db03b815-295a-4a66-9afd-a1f4ba97601f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.897825] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 5013beda-7f34-44fe-9159-f04e0aca5bce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.400967] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.904258] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 11d4f981-b167-4c81-9cd7-7e939606d400 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.407261] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0a62f878-43c1-4aaf-9054-798572b4faa7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.910466] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 68794d97-95f7-4612-9f9f-e370afb3d852 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.413623] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a9edbd98-3e67-476b-934d-15d893a62d02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.916933] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 635f362a-582e-44bc-85d8-8a69943982b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.420339] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f88f0ef2-24f2-4eef-92a3-8de2ebb6944a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.420652] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 816.420742] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 816.688122] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e60c680-a772-45e0-89b1-fdaa9bf7a27a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.696172] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ccbb15-88d9-4e87-8a83-c4b9f7d20401 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.725421] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9296569c-a5d3-4f15-b73e-004618bd3029 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.733088] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b78487d-75d7-435d-97c4-ce3fa2bd2de4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.745997] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.251108] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.755671] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 817.755986] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.451s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.756367] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.150s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.533010] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e902f729-7418-4426-a586-18b7dbd73251 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.540675] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeb048a-65fa-468f-b814-21f40b81ca24 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.571603] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016f528a-6593-455e-8060-52d963dbeacb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.579608] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c113a5a3-875e-472f-be11-d800b6ee2b87 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.594571] env[61974]: DEBUG nova.compute.provider_tree [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.099188] env[61974]: DEBUG nova.scheduler.client.report [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.605040] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.605543] env[61974]: ERROR nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. 
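[editor's note] The recurring 'Acquiring lock ...', 'acquired ... waited N s' and 'released ... held N s' lines (for "compute_resources" and the per-instance "refresh_cache-..." locks) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of that pattern is below; the function names are illustrative stand-ins for the resource-tracker methods named in the log, not Nova's actual signatures.

    from oslo_concurrency import lockutils

    # Callers of the decorated function are serialized on an in-process lock
    # named "compute_resources"; lockutils logs the acquire/wait/hold times
    # seen as DEBUG lines in this log.
    @lockutils.synchronized('compute_resources')
    def heal_allocations():
        ...  # critical section guarded by the lock

    # Equivalent inline form with the context manager, as used for the
    # "refresh_cache-<uuid>" style locks:
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            ...  # rebuild the network info cache while holding the lock

The long wait reported just above (27.150s for "compute_resources") reflects this serialization: the claim-abort for the failed instance had to queue behind the periodic _update_available_resource run that held the same lock.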
[ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Traceback (most recent call last): [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.driver.spawn(context, instance, image_meta, [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] vm_ref = self.build_virtual_machine(instance, [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.605543] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] for vif in network_info: [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self._sync_wrapper(fn, *args, **kwargs) [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.wait() [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self[:] = self._gt.wait() [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self._exit_event.wait() [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] result = hub.switch() [ 819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
819.605953] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return self.greenlet.switch() [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] result = function(*args, **kwargs) [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] return func(*args, **kwargs) [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise e [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] nwinfo = self.network_api.allocate_for_instance( [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] created_port_ids = self._update_ports_for_instance( [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] with excutils.save_and_reraise_exception(): [ 819.606375] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] self.force_reraise() [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise self.value [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] updated_port = self._update_port( [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] _ensure_no_port_binding_failure(port) [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] raise exception.PortBindingFailed(port_id=port['id']) [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] nova.exception.PortBindingFailed: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. [ 819.606789] env[61974]: ERROR nova.compute.manager [instance: 30455d07-4826-4561-a04f-1b4a2041402c] [ 819.607159] env[61974]: DEBUG nova.compute.utils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 819.607545] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.913s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.609512] env[61974]: INFO nova.compute.claims [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.613458] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Build of instance 30455d07-4826-4561-a04f-1b4a2041402c was re-scheduled: Binding failed for port 9c2c80e9-3703-433d-a155-7504f2ebaba6, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 819.613875] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 819.614118] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.614269] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.614426] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.134489] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.222050] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.725568] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-30455d07-4826-4561-a04f-1b4a2041402c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.725799] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 820.726015] env[61974]: DEBUG nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.726212] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.743453] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.887724] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4130e747-0bf9-4f1f-902a-7f7bcb38a065 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.895196] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3174168c-3e12-4ae4-b34d-833da6bb4d7a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.925262] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7709c3d-7e80-4560-bd5b-9bcbba5daf07 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.932038] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6400ddda-f9f4-439c-983b-1d33d76e20d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.949434] env[61974]: DEBUG nova.compute.provider_tree [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.248180] env[61974]: DEBUG nova.network.neutron [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.453674] env[61974]: DEBUG nova.scheduler.client.report [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.750990] env[61974]: INFO nova.compute.manager [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 30455d07-4826-4561-a04f-1b4a2041402c] Took 1.02 seconds to deallocate network for instance. [ 821.958731] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.959146] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 821.961904] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.084s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.963329] env[61974]: INFO nova.compute.claims [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.467934] env[61974]: DEBUG nova.compute.utils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 822.471085] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 822.471253] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 822.509960] env[61974]: DEBUG nova.policy [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77dae392f1194571a37cd4e5b041c676', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '228e048adfca4ff4be1a32655b584986', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 822.781253] env[61974]: INFO nova.scheduler.client.report [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocations for instance 30455d07-4826-4561-a04f-1b4a2041402c [ 822.845823] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Successfully created port: 8e91115d-ca19-4fc0-b008-94f029613285 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.972742] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 823.285139] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52604748-2bb9-4f32-b25a-1383585ce9e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.291559] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb43d0d-4d1d-4fbb-b221-983014beb980 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.295685] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2337d5e8-cb67-43e6-bd59-9959f659e8a8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "30455d07-4826-4561-a04f-1b4a2041402c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 167.498s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.325173] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cd5f86-20cb-4542-a389-4ff31cd843f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.334193] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4caf4b7-57ac-47dc-a686-daf8664af988 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.350682] env[61974]: DEBUG nova.compute.provider_tree [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.480392] env[61974]: INFO nova.virt.block_device [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Booting with volume 7f75c749-8f00-4b78-82a8-4978044b3834 at /dev/sda [ 823.540707] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c638c8e7-475d-406c-b67e-f41497b5c058 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.551522] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dfbea3-4930-48a3-8f0d-5759f507edfd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.575982] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55dd48de-92ce-4091-893b-dd4e0156c096 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.585740] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880bdb69-cf8b-4de3-80bd-612d31ac1c34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.619325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-294f2aee-db7a-491c-93a5-376e4ef5a0ea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.627413] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2aab4f-9de9-4389-8fd3-6ff188d80834 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.641559] env[61974]: DEBUG nova.virt.block_device [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating existing volume attachment record: 4cae4826-2040-4c34-8417-4a5865e23843 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 823.688293] env[61974]: DEBUG nova.compute.manager [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Received event network-changed-8e91115d-ca19-4fc0-b008-94f029613285 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.688507] env[61974]: DEBUG nova.compute.manager [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Refreshing instance network info cache due to event network-changed-8e91115d-ca19-4fc0-b008-94f029613285. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 823.688793] env[61974]: DEBUG oslo_concurrency.lockutils [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] Acquiring lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.688912] env[61974]: DEBUG oslo_concurrency.lockutils [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] Acquired lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.689258] env[61974]: DEBUG nova.network.neutron [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Refreshing network info cache for port 8e91115d-ca19-4fc0-b008-94f029613285 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.801191] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.856736] env[61974]: DEBUG nova.scheduler.client.report [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.917318] env[61974]: ERROR nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. [ 823.917318] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 823.917318] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 823.917318] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 823.917318] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.917318] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.917318] env[61974]: ERROR nova.compute.manager raise self.value [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 823.917318] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 823.917318] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.917318] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 823.917908] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 823.917908] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 823.917908] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. 
[ 823.917908] env[61974]: ERROR nova.compute.manager [ 823.917908] env[61974]: Traceback (most recent call last): [ 823.917908] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 823.917908] env[61974]: listener.cb(fileno) [ 823.917908] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 823.917908] env[61974]: result = function(*args, **kwargs) [ 823.917908] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 823.917908] env[61974]: return func(*args, **kwargs) [ 823.917908] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 823.917908] env[61974]: raise e [ 823.917908] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 823.917908] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 823.917908] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 823.917908] env[61974]: created_port_ids = self._update_ports_for_instance( [ 823.917908] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 823.917908] env[61974]: with excutils.save_and_reraise_exception(): [ 823.917908] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.917908] env[61974]: self.force_reraise() [ 823.917908] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.917908] env[61974]: raise self.value [ 823.917908] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 823.917908] env[61974]: updated_port = self._update_port( [ 823.917908] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.917908] env[61974]: _ensure_no_port_binding_failure(port) [ 823.917908] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 823.917908] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 823.918858] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. [ 823.918858] env[61974]: Removing descriptor: 20 [ 824.178769] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 824.179049] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 824.208497] env[61974]: DEBUG nova.network.neutron [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.281038] env[61974]: DEBUG nova.network.neutron [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.319556] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.363354] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.363948] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 824.366457] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.356s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.368347] env[61974]: INFO nova.compute.claims [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.479567] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "f0601d26-4e29-4946-bb52-50e2a2163535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.479849] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.688027] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 824.688306] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 824.688306] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 824.783928] env[61974]: DEBUG oslo_concurrency.lockutils [req-882f1c31-025a-40d3-b8cd-8d913d610093 req-2dc0ce28-b9d2-415d-a6e6-3ba2199a10af service nova] Releasing lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.872431] env[61974]: DEBUG nova.compute.utils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.876323] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 824.876511] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.939528] env[61974]: DEBUG nova.policy [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ea7f37c2b55463b9d1d084e73dbf5c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a117fc7495e1478b83f0a543effe8e06', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.193206] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193429] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193495] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Skipping network cache update for instance because it is Building. 
{{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193616] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193721] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193828] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.193951] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 825.370361] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.370508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquired lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.370683] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Forcefully refreshing network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 825.370848] env[61974]: DEBUG nova.objects.instance [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lazy-loading 'info_cache' on Instance uuid f6b76518-d691-4e4f-861a-624a1684e564 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 825.378480] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 825.442192] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Successfully created port: 8913d868-44e2-4e14-8c71-b347d29be757 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.719451] env[61974]: DEBUG nova.compute.manager [req-25649d4f-da7c-41e9-90bb-982af20f7953 req-311ffb06-d69b-44f8-aeaa-89451e5466d5 service nova] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Received event network-vif-deleted-8e91115d-ca19-4fc0-b008-94f029613285 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 825.736810] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 825.737501] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.737675] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.737714] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.737882] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.738044] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.738197] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.738395] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.738577] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.738758] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.738924] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.739113] env[61974]: DEBUG nova.virt.hardware [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.742310] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a367f4c-a086-4615-83b2-38916351f8a2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.751398] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a85bdb-16c4-4ab4-8216-8eb1274d5414 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.766964] env[61974]: ERROR nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. 
[ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Traceback (most recent call last): [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] yield resources [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.driver.spawn(context, instance, image_meta, [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] vm_ref = self.build_virtual_machine(instance, [ 825.766964] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] for vif in network_info: [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return self._sync_wrapper(fn, *args, **kwargs) [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.wait() [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self[:] = self._gt.wait() [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return self._exit_event.wait() [ 825.767391] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 825.767391] env[61974]: ERROR 
nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] current.throw(*self._exc) [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] result = function(*args, **kwargs) [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return func(*args, **kwargs) [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise e [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] nwinfo = self.network_api.allocate_for_instance( [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] created_port_ids = self._update_ports_for_instance( [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] with excutils.save_and_reraise_exception(): [ 825.767908] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.force_reraise() [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise self.value [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] updated_port = self._update_port( [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] _ensure_no_port_binding_failure(port) [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise exception.PortBindingFailed(port_id=port['id']) [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. [ 825.768355] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] [ 825.768355] env[61974]: INFO nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Terminating instance [ 825.769325] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Successfully created port: f48ed5b1-e636-41f7-9e3a-2ee0758f8f56 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.772744] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquiring lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.772903] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquired lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.773077] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.785145] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c943cb-5261-4859-9126-030a6bc3de57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.791815] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2dd18c-db18-4652-8b20-988f4681d60b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.822249] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8705f5c1-4466-4fef-9967-70e50fe98359 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.829544] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00f2373-757c-43a7-ad96-a62901da000c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.842914] env[61974]: DEBUG nova.compute.provider_tree [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 
tempest-ServerActionsV293TestJSON-1858162193-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.060408] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Successfully created port: d4a78c8f-6022-4f39-b168-feea00fea908 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.295297] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.346403] env[61974]: DEBUG nova.scheduler.client.report [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.393332] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.398869] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.419041] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.427285] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 826.427590] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.427763] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 826.427971] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.428115] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 826.428296] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 826.429683] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 826.429918] env[61974]: DEBUG 
nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 826.430120] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 826.430294] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 826.430471] env[61974]: DEBUG nova.virt.hardware [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 826.431956] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8786fe96-22fa-4683-98bf-8030d2269455 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.447925] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632c74b9-0935-4a88-8fcb-fadd3a055c2e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.852319] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.852681] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 826.855353] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.076s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.856733] env[61974]: INFO nova.compute.claims [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.922297] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Releasing lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.922850] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 826.923182] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5610f0f-5163-4253-b370-5b5eda1eafa8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.931991] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95afbc0-b2ce-407f-a257-aa5cdc49af2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.953175] env[61974]: WARNING nova.virt.vmwareapi.driver [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance e6bc38d5-056f-40c2-a2ed-467200da2738 could not be found. 
[ 826.953395] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.953667] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05ad5cc1-69c6-418f-9911-30ef329b5657 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.961025] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ec030f-d864-4450-a80c-5daafc9929ea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.975844] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.981928] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6bc38d5-056f-40c2-a2ed-467200da2738 could not be found. [ 826.982031] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.982179] env[61974]: INFO nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Took 0.06 seconds to destroy the instance on the hypervisor. [ 826.982428] env[61974]: DEBUG oslo.service.loopingcall [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.982641] env[61974]: DEBUG nova.compute.manager [-] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.982746] env[61974]: DEBUG nova.network.neutron [-] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.002831] env[61974]: DEBUG nova.network.neutron [-] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.147736] env[61974]: ERROR nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. [ 827.147736] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.147736] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.147736] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.147736] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.147736] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.147736] env[61974]: ERROR nova.compute.manager raise self.value [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.147736] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 827.147736] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.147736] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 827.148335] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.148335] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 827.148335] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. 
[ 827.148335] env[61974]: ERROR nova.compute.manager [ 827.148335] env[61974]: Traceback (most recent call last): [ 827.148335] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 827.148335] env[61974]: listener.cb(fileno) [ 827.148335] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.148335] env[61974]: result = function(*args, **kwargs) [ 827.148335] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.148335] env[61974]: return func(*args, **kwargs) [ 827.148335] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.148335] env[61974]: raise e [ 827.148335] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.148335] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 827.148335] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.148335] env[61974]: created_port_ids = self._update_ports_for_instance( [ 827.148335] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.148335] env[61974]: with excutils.save_and_reraise_exception(): [ 827.148335] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.148335] env[61974]: self.force_reraise() [ 827.148335] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.148335] env[61974]: raise self.value [ 827.148335] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.148335] env[61974]: updated_port = self._update_port( [ 827.148335] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.148335] env[61974]: _ensure_no_port_binding_failure(port) [ 827.148335] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.148335] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 827.149322] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. [ 827.149322] env[61974]: Removing descriptor: 20 [ 827.149322] env[61974]: ERROR nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. 
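The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which turns a Neutron port whose binding failed into PortBindingFailed. Below is a minimal sketch of that check using a stand-in exception class rather than Nova's own; the file and function names come from the frames above, but the body is an illustration, not Nova's verbatim code.

```python
# Illustration only: stand-in for nova.exception.PortBindingFailed and the
# shape of the check the frames above end in. Not Nova's verbatim code.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron logs "
            "for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with binding:vif_type set to
    # 'binding_failed'; Nova raises so the build is aborted and cleaned up.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# The port from the log entries above, as Neutron would report it after a
# failed binding:
try:
    ensure_no_port_binding_failure(
        {'id': '8913d868-44e2-4e14-8c71-b347d29be757',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```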
[ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Traceback (most recent call last): [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] yield resources [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.driver.spawn(context, instance, image_meta, [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.149322] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] vm_ref = self.build_virtual_machine(instance, [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] for vif in network_info: [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self._sync_wrapper(fn, *args, **kwargs) [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.wait() [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self[:] = self._gt.wait() [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self._exit_event.wait() [ 827.149795] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.150258] env[61974]: ERROR 
nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] result = hub.switch() [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self.greenlet.switch() [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] result = function(*args, **kwargs) [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return func(*args, **kwargs) [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise e [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] nwinfo = self.network_api.allocate_for_instance( [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 827.150258] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] created_port_ids = self._update_ports_for_instance( [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] with excutils.save_and_reraise_exception(): [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.force_reraise() [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise self.value [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] updated_port = self._update_port( [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.150719] 
env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] _ensure_no_port_binding_failure(port) [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.150719] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise exception.PortBindingFailed(port_id=port['id']) [ 827.151210] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. [ 827.151210] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] [ 827.151210] env[61974]: INFO nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Terminating instance [ 827.151832] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.151987] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.152171] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.362310] env[61974]: DEBUG nova.compute.utils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.365351] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.365602] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.407611] env[61974]: DEBUG nova.policy [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a3e6c01d83e4ea39e88b25abe6b7bee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e2ece47b612487bb6e07758ec290c91', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 827.480532] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Releasing lock "refresh_cache-f6b76518-d691-4e4f-861a-624a1684e564" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.480761] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Updated the network info_cache for instance {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 827.480981] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.481230] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.482036] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.482036] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.482036] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.482036] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 827.482036] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 827.482277] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.506341] env[61974]: DEBUG nova.network.neutron [-] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.712454] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.744555] env[61974]: DEBUG nova.compute.manager [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Received event network-changed-8913d868-44e2-4e14-8c71-b347d29be757 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.744881] env[61974]: DEBUG nova.compute.manager [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Refreshing instance network info cache due to event network-changed-8913d868-44e2-4e14-8c71-b347d29be757. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.744958] env[61974]: DEBUG oslo_concurrency.lockutils [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] Acquiring lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.853603] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Successfully created port: ecd631e0-9af4-4d2f-89b3-f9f60860a9ae {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.870638] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 827.873902] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.985212] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.009081] env[61974]: INFO nova.compute.manager [-] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Took 1.03 seconds to deallocate network for instance. [ 828.212875] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8189bc1a-63d8-4fe1-b339-02d6b18e91f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.224026] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b13c4b-6996-47de-b921-a2b8de8b3dc7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.255442] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a398283-ec0c-4c14-a993-82482431cdb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.263432] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95851115-2bf1-41e1-9ff3-a5f7da1aa22a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.277843] env[61974]: DEBUG nova.compute.provider_tree [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.376856] env[61974]: INFO nova.virt.block_device [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Booting with volume 0bec52b9-8cac-4ccb-b3bb-0c84f9abdbdf at /dev/sda [ 828.378760] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Releasing lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.379148] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 828.379289] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.379857] env[61974]: DEBUG oslo_concurrency.lockutils [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] Acquired lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.380073] env[61974]: DEBUG nova.network.neutron [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Refreshing network info cache for port 8913d868-44e2-4e14-8c71-b347d29be757 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.381061] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9957be1a-415c-46d2-b3c7-0158094709ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.393749] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b49d86-b417-4c40-b309-136be2a38948 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.419536] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2601b97a-8ef6-4b61-b0e0-dd6c7c203206 could not be found. [ 828.419758] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.419939] env[61974]: INFO nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Took 0.04 seconds to destroy the instance on the hypervisor. [ 828.420280] env[61974]: DEBUG oslo.service.loopingcall [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.420700] env[61974]: DEBUG nova.compute.manager [-] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 828.420833] env[61974]: DEBUG nova.network.neutron [-] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.432922] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aefdfb78-c941-46a4-80d0-b9d5b45ea64e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.441143] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a8d584-0754-413e-9a8e-33eb1ce52d36 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.462903] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bc5cf6a-06a5-437c-aa79-bdfa30e8de8d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.464975] env[61974]: DEBUG nova.network.neutron [-] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.472701] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf751108-32b5-49b2-a0ca-dcf17780106d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.495174] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0248c1-7bae-4880-bb07-02f130c681b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.500994] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe2a314-638c-49b5-b6c6-581636046b48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.514473] env[61974]: DEBUG nova.virt.block_device [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating existing volume attachment record: 64d3e1f4-2fcb-4b7d-92f0-3596f38f9475 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 828.583749] env[61974]: INFO nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Took 0.57 seconds to detach 1 volumes for instance. 
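The ServerActionsV293TestJSON records above ("Booting with volume 0bec52b9-8cac-4ccb-b3bb-0c84f9abdbdf at /dev/sda", "Updating existing volume attachment record") describe a boot-from-volume build. Roughly, the request that produces such a build carries a block_device_mapping_v2 entry like the sketch below; the exact payload the tempest test sends is not in this log, so apart from the volume id and device name taken from the entries above, the field values are illustrative assumptions.

```python
# Hedged sketch of a boot-from-volume request body for POST /servers.
# Only the volume uuid and device name come from the log; flavorRef matches
# the m1.nano flavorid seen later in this log, and the remaining values
# (name, delete_on_termination, networks) are assumed example values.
server_request = {
    "server": {
        "name": "bfv-example",
        "flavorRef": "42",
        "block_device_mapping_v2": [{
            "boot_index": 0,
            "uuid": "0bec52b9-8cac-4ccb-b3bb-0c84f9abdbdf",
            "source_type": "volume",
            "destination_type": "volume",
            "device_name": "/dev/sda",
            "delete_on_termination": False,
        }],
        "networks": "auto",
    }
}
```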
[ 828.591025] env[61974]: DEBUG nova.compute.claims [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 828.591025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.781303] env[61974]: DEBUG nova.scheduler.client.report [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 828.959038] env[61974]: DEBUG nova.network.neutron [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.159106] env[61974]: DEBUG nova.network.neutron [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.247053] env[61974]: ERROR nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. 
[ 829.247053] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.247053] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.247053] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.247053] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.247053] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.247053] env[61974]: ERROR nova.compute.manager raise self.value [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.247053] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 829.247053] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.247053] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 829.247566] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.247566] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 829.247566] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. 
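Both failure paths in these tracebacks pass through oslo.utils' excutils.save_and_reraise_exception() (the force_reraise / raise self.value frames). A short sketch of that pattern follows, assuming oslo.utils is installed; the surrounding function and its arguments are invented for illustration and are not Nova's _update_ports_for_instance.

```python
from oslo_utils import excutils

def create_ports_with_rollback(port_specs, create_port, delete_port):
    """Illustrative only: roll back partial work without losing the error."""
    created = []
    for spec in port_specs:
        try:
            created.append(create_port(spec))
        except Exception:
            # The context manager records the in-flight exception, lets this
            # block run cleanup, then re-raises the original exception on
            # exit -- the force_reraise()/raise self.value frames seen in the
            # tracebacks above.
            with excutils.save_and_reraise_exception():
                for port_id in created:
                    delete_port(port_id)
    return created
```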
[ 829.247566] env[61974]: ERROR nova.compute.manager [ 829.247566] env[61974]: Traceback (most recent call last): [ 829.247566] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 829.247566] env[61974]: listener.cb(fileno) [ 829.247566] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 829.247566] env[61974]: result = function(*args, **kwargs) [ 829.247566] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 829.247566] env[61974]: return func(*args, **kwargs) [ 829.247566] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 829.247566] env[61974]: raise e [ 829.247566] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 829.247566] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 829.247566] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 829.247566] env[61974]: created_port_ids = self._update_ports_for_instance( [ 829.247566] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 829.247566] env[61974]: with excutils.save_and_reraise_exception(): [ 829.247566] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 829.247566] env[61974]: self.force_reraise() [ 829.247566] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 829.247566] env[61974]: raise self.value [ 829.247566] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 829.247566] env[61974]: updated_port = self._update_port( [ 829.247566] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 829.247566] env[61974]: _ensure_no_port_binding_failure(port) [ 829.247566] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 829.247566] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 829.248610] env[61974]: nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. [ 829.248610] env[61974]: Removing descriptor: 20 [ 829.286148] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.286728] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 829.289342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.308s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.662156] env[61974]: DEBUG oslo_concurrency.lockutils [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] Releasing lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.662428] env[61974]: DEBUG nova.compute.manager [req-a1e79ed9-8c5d-45ca-aabd-ae3aaecda646 req-13918291-8674-4cff-ab93-54b8bac66354 service nova] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Received event network-vif-deleted-8913d868-44e2-4e14-8c71-b347d29be757 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.706663] env[61974]: DEBUG nova.network.neutron [-] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.772149] env[61974]: DEBUG nova.compute.manager [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Received event network-changed-ecd631e0-9af4-4d2f-89b3-f9f60860a9ae {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.772341] env[61974]: DEBUG nova.compute.manager [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Refreshing instance network info cache due to event network-changed-ecd631e0-9af4-4d2f-89b3-f9f60860a9ae. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 829.772553] env[61974]: DEBUG oslo_concurrency.lockutils [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] Acquiring lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.772677] env[61974]: DEBUG oslo_concurrency.lockutils [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] Acquired lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.772838] env[61974]: DEBUG nova.network.neutron [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Refreshing network info cache for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.793758] env[61974]: DEBUG nova.compute.utils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 829.798447] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 829.798645] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 829.847343] env[61974]: DEBUG nova.policy [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3e621edf24c4d41b5f6ee3256a4c7e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9782bcdfaa44b999f27626a7d05b227', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.110333] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715738a4-d498-4f0d-ab9a-1cc1c0085d28 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.118251] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4aeba93-9257-47b9-8762-7b6a7c8d27ea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.150591] env[61974]: DEBUG nova.network.neutron [None 
req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Successfully created port: 9bb23585-3e69-475a-b54a-f45ef2fcbb1c {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.153015] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3bfcc3-3cea-4373-8062-b2af3f3ed9ae {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.160476] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefc062d-d2ad-4e4a-b0a8-de8b53b0850b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.173474] env[61974]: DEBUG nova.compute.provider_tree [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.209346] env[61974]: INFO nova.compute.manager [-] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Took 1.79 seconds to deallocate network for instance. [ 830.211797] env[61974]: DEBUG nova.compute.claims [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 830.212071] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.293537] env[61974]: DEBUG nova.network.neutron [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.298903] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 830.445218] env[61974]: DEBUG nova.network.neutron [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.659125] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.659125] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.659125] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.659125] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.659317] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.659317] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.659317] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.659317] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.659317] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.659478] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] 
Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.659478] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.659478] env[61974]: DEBUG nova.virt.hardware [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.661494] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7af619-e416-4cc5-ad3b-8cfc9690ee7a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.673155] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31ec281-11a6-437c-b457-cf009bfe5f97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.677494] env[61974]: DEBUG nova.scheduler.client.report [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.691010] env[61974]: ERROR nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. 
[ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Traceback (most recent call last): [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] yield resources [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.driver.spawn(context, instance, image_meta, [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] vm_ref = self.build_virtual_machine(instance, [ 830.691010] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] for vif in network_info: [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return self._sync_wrapper(fn, *args, **kwargs) [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.wait() [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self[:] = self._gt.wait() [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return self._exit_event.wait() [ 830.691649] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 830.691649] env[61974]: ERROR 
nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] current.throw(*self._exc) [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] result = function(*args, **kwargs) [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return func(*args, **kwargs) [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise e [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] nwinfo = self.network_api.allocate_for_instance( [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] created_port_ids = self._update_ports_for_instance( [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] with excutils.save_and_reraise_exception(): [ 830.692548] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.force_reraise() [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise self.value [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] updated_port = self._update_port( [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] _ensure_no_port_binding_failure(port) [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise exception.PortBindingFailed(port_id=port['id']) [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. [ 830.693366] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] [ 830.693366] env[61974]: INFO nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Terminating instance [ 830.694164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquiring lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.949191] env[61974]: DEBUG oslo_concurrency.lockutils [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] Releasing lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.949474] env[61974]: DEBUG nova.compute.manager [req-b4c18772-cb72-41d6-a363-62076f6c0e3e req-87221211-e68d-4657-8f61-e3978226320d service nova] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Received event network-vif-deleted-ecd631e0-9af4-4d2f-89b3-f9f60860a9ae {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.949843] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquired lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.950026] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.183552] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.894s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.184254] env[61974]: ERROR nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
[ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Traceback (most recent call last): [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.driver.spawn(context, instance, image_meta, [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] vm_ref = self.build_virtual_machine(instance, [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] vif_infos = vmwarevif.get_vif_info(self._session, [ 831.184254] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] for vif in network_info: [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return self._sync_wrapper(fn, *args, **kwargs) [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.wait() [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self[:] = self._gt.wait() [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return self._exit_event.wait() [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] current.throw(*self._exc) [ 831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
831.184774] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] result = function(*args, **kwargs) [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] return func(*args, **kwargs) [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise e [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] nwinfo = self.network_api.allocate_for_instance( [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] created_port_ids = self._update_ports_for_instance( [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] with excutils.save_and_reraise_exception(): [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] self.force_reraise() [ 831.185184] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise self.value [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] updated_port = self._update_port( [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] _ensure_no_port_binding_failure(port) [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] raise exception.PortBindingFailed(port_id=port['id']) [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] nova.exception.PortBindingFailed: Binding failed for 
port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. [ 831.185619] env[61974]: ERROR nova.compute.manager [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] [ 831.185619] env[61974]: DEBUG nova.compute.utils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 831.186266] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.285s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.186466] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.188523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.610s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.188699] env[61974]: DEBUG nova.objects.instance [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 831.191638] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Build of instance 2b74ee60-ce70-429a-9ccb-1f96c236cf8c was re-scheduled: Binding failed for port 9d2964d9-0189-4d44-86a8-494c91ea9068, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 831.192112] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 831.192371] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquiring lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.192523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Acquired lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.192688] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.220383] env[61974]: INFO nova.scheduler.client.report [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Deleted allocations for instance 59b1ad04-c949-4b07-af77-f84f842dd9ee [ 831.311809] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 831.342118] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.342118] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.342118] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.342300] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.342300] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.342300] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.342300] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.342300] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 831.342476] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.342476] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.343107] env[61974]: DEBUG nova.virt.hardware [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.344421] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eebbfb6-8289-49c6-b788-c9779de34bb0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.351240] env[61974]: ERROR nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 831.351240] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 831.351240] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 831.351240] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 831.351240] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.351240] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.351240] env[61974]: ERROR nova.compute.manager raise self.value [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 831.351240] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 831.351240] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.351240] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 831.354375] env[61974]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 831.354375] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 831.354375] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 831.354375] env[61974]: ERROR nova.compute.manager [ 831.354375] env[61974]: Traceback (most recent call last): [ 831.354375] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 831.354375] env[61974]: listener.cb(fileno) [ 831.354375] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 831.354375] env[61974]: result = function(*args, **kwargs) [ 831.354375] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 831.354375] env[61974]: return func(*args, **kwargs) [ 831.354375] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 831.354375] env[61974]: raise e [ 831.354375] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 831.354375] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 831.354375] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 831.354375] env[61974]: created_port_ids = self._update_ports_for_instance( [ 831.354375] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 831.354375] env[61974]: with excutils.save_and_reraise_exception(): [ 831.354375] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.354375] env[61974]: self.force_reraise() [ 831.354375] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.354375] env[61974]: raise self.value [ 831.354375] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 831.354375] env[61974]: updated_port = self._update_port( [ 831.354375] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.354375] env[61974]: _ensure_no_port_binding_failure(port) [ 831.354375] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 831.354375] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 831.355324] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 831.355324] env[61974]: Removing descriptor: 20 [ 831.355533] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165269cc-9d33-4b43-9968-4817ca56c41f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.370926] env[61974]: ERROR nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. 
[ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] Traceback (most recent call last): [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] yield resources [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.driver.spawn(context, instance, image_meta, [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] vm_ref = self.build_virtual_machine(instance, [ 831.370926] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] for vif in network_info: [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return self._sync_wrapper(fn, *args, **kwargs) [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.wait() [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self[:] = self._gt.wait() [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return self._exit_event.wait() [ 831.371350] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 831.371350] env[61974]: ERROR 
nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] current.throw(*self._exc) [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] result = function(*args, **kwargs) [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return func(*args, **kwargs) [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise e [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] nwinfo = self.network_api.allocate_for_instance( [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] created_port_ids = self._update_ports_for_instance( [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] with excutils.save_and_reraise_exception(): [ 831.371785] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.force_reraise() [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise self.value [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] updated_port = self._update_port( [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] _ensure_no_port_binding_failure(port) [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise exception.PortBindingFailed(port_id=port['id']) [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 831.372210] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] [ 831.372210] env[61974]: INFO nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Terminating instance [ 831.373736] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquiring lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.373905] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquired lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.374086] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.465982] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.552612] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.714540] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.730717] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ef8d656a-7b46-4a02-9909-9f5e8d6cfa80 tempest-ServersAaction247Test-527567975 tempest-ServersAaction247Test-527567975-project-member] Lock "59b1ad04-c949-4b07-af77-f84f842dd9ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.582s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.798769] env[61974]: DEBUG nova.compute.manager [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Received event network-changed-9bb23585-3e69-475a-b54a-f45ef2fcbb1c {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.798827] env[61974]: DEBUG nova.compute.manager [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Refreshing instance network info cache due to event network-changed-9bb23585-3e69-475a-b54a-f45ef2fcbb1c. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 831.798999] env[61974]: DEBUG oslo_concurrency.lockutils [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] Acquiring lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.804458] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.892447] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.975602] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.056782] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Releasing lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.056889] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 832.057606] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66c22b16-88f5-4c4f-9f5c-8d2bb5907a41 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.066458] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c31fc84-6e6b-468a-b478-ae93138865ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.089570] env[61974]: WARNING nova.virt.vmwareapi.driver [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 242d6159-5223-4815-900c-4c1285c7a90c could not be found. [ 832.089793] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.090099] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d20f397-c3b9-46d9-836e-ec586325ee37 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.098176] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315792f8-ab54-482a-9b9d-9a7f2b0ca1ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.121019] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 242d6159-5223-4815-900c-4c1285c7a90c could not be found. [ 832.121165] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.121293] env[61974]: INFO nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 832.121293] env[61974]: DEBUG oslo.service.loopingcall [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.121293] env[61974]: DEBUG nova.compute.manager [-] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.121293] env[61974]: DEBUG nova.network.neutron [-] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.137082] env[61974]: DEBUG nova.network.neutron [-] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.202637] env[61974]: DEBUG oslo_concurrency.lockutils [None req-65fc5e70-d954-4063-b1c3-5f41f60bb752 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.203674] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.570s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.205115] env[61974]: INFO nova.compute.claims [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.307787] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Releasing lock "refresh_cache-2b74ee60-ce70-429a-9ccb-1f96c236cf8c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.307787] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 832.307787] env[61974]: DEBUG nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.307787] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.324519] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.477947] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Releasing lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.478812] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 832.478812] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.479035] env[61974]: DEBUG oslo_concurrency.lockutils [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] Acquired lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.480031] env[61974]: DEBUG nova.network.neutron [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Refreshing network info cache for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.481062] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eebe68b3-7680-4314-9ad6-40414dd95132 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.490144] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080812cd-3089-435a-ba00-9993a19f46fa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.512110] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f290da20-8a42-42f5-8902-136e434d29d0 could not be found. [ 832.512340] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.512564] env[61974]: INFO nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 832.512812] env[61974]: DEBUG oslo.service.loopingcall [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.513031] env[61974]: DEBUG nova.compute.manager [-] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.513129] env[61974]: DEBUG nova.network.neutron [-] [instance: f290da20-8a42-42f5-8902-136e434d29d0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.536827] env[61974]: DEBUG nova.network.neutron [-] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.640046] env[61974]: DEBUG nova.network.neutron [-] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.827324] env[61974]: DEBUG nova.network.neutron [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.999497] env[61974]: DEBUG nova.network.neutron [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.038750] env[61974]: DEBUG nova.network.neutron [-] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.096186] env[61974]: DEBUG nova.network.neutron [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.142844] env[61974]: INFO nova.compute.manager [-] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Took 1.02 seconds to deallocate network for instance. [ 833.330385] env[61974]: INFO nova.compute.manager [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] [instance: 2b74ee60-ce70-429a-9ccb-1f96c236cf8c] Took 1.02 seconds to deallocate network for instance. 
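Every PortBindingFailed traceback in this stretch of the log ends at the same frame: nova/network/neutron.py line 294, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']). A minimal, self-contained sketch of that check follows; only the function name, the raise, and the error message are taken from the log itself, while the local exception class and the 'binding_failed' value of 'binding:vif_type' are assumptions used for illustration rather than the Nova source.

    # Hedged sketch of the check the tracebacks above terminate in.
    # PortBindingFailed is a local stand-in for nova.exception.PortBindingFailed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for more "
                "information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a binding it could not complete by setting
        # the port's 'binding:vif_type' to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with a port shaped like the one from the first traceback above:
    try:
        _ensure_no_port_binding_failure(
            {'id': 'ecd631e0-9af4-4d2f-89b3-f9f60860a9ae',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

The surrounding frames show why the exception reaches the build path unchanged: _update_ports_for_instance() runs the port update inside excutils.save_and_reraise_exception(), whose force_reraise() re-raises the saved exception (raise self.value), and _allocate_network_async() re-raises it again (raise e) before the compute manager marks the build as failed and terminates the instance.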
[ 833.502182] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3112e1c-7323-4b46-a0a9-c855f96649f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.510051] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cc0656-8c60-4fbd-999f-3f60d37039c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.542594] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d9885b-cc2e-4883-a000-b4c054bd47a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.542594] env[61974]: INFO nova.compute.manager [-] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Took 1.03 seconds to deallocate network for instance. [ 833.545651] env[61974]: DEBUG nova.compute.claims [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 833.545651] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.549182] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dd8841-7945-41ca-912e-f684f1eed78e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.563218] env[61974]: DEBUG nova.compute.provider_tree [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.600764] env[61974]: DEBUG oslo_concurrency.lockutils [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] Releasing lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.600764] env[61974]: DEBUG nova.compute.manager [req-f5c23ec7-2094-4242-bf8f-8cefcd1c3c7b req-181dad1c-312c-4ddd-9b25-5e7059824b16 service nova] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Received event network-vif-deleted-9bb23585-3e69-475a-b54a-f45ef2fcbb1c {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.683926] env[61974]: INFO nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Took 0.54 seconds to detach 1 volumes for instance. 
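The recurring 'Lock "compute_resources" acquired by ... :: waited Ns' and '"released" ... :: held Ns' DEBUG lines come from oslo_concurrency.lockutils: the resource tracker methods named in these records (instance_claim, abort_instance_claim, update_usage, finish_evacuation) all serialize on the single 'compute_resources' lock, so the 27-28 second waits logged above are concurrent builds and cleanups queuing behind one another. A minimal sketch of the pattern, with a hypothetical function standing in for those methods:

    # Hedged sketch: the decorator below is the standard oslo.concurrency way to
    # get the acquired/waited and released/held DEBUG lines seen in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(context, instance, nodename):
        # While this body runs, every other claim/abort/update on the same host
        # blocks on the 'compute_resources' lock and accrues 'waited' time.
        pass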
[ 833.687875] env[61974]: DEBUG nova.compute.claims [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 833.688491] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.066129] env[61974]: DEBUG nova.scheduler.client.report [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 834.363371] env[61974]: INFO nova.scheduler.client.report [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Deleted allocations for instance 2b74ee60-ce70-429a-9ccb-1f96c236cf8c [ 834.571368] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.571929] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 834.574918] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.180s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.873253] env[61974]: DEBUG oslo_concurrency.lockutils [None req-44f984b6-96b2-495d-aa26-0392fdbd82a9 tempest-ServerTagsTestJSON-1461578565 tempest-ServerTagsTestJSON-1461578565-project-member] Lock "2b74ee60-ce70-429a-9ccb-1f96c236cf8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.601s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.080474] env[61974]: DEBUG nova.compute.utils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.085150] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 835.085150] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.140051] env[61974]: DEBUG nova.policy [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '934312112100440d8bbc689166b9d691', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fccd6a0b4bcf4e778822d7bb88fcc8bc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 835.374667] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.417728] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccabb03c-d0d3-4a0f-bf3a-612e5f915eee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.425285] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Successfully created port: 38cb1ea8-4503-4040-9549-73260b27b3ac {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.433208] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6949e-7606-44c3-be8b-3468b6fdc69b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.465250] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3649c4-d4e8-464d-80bf-9fd3b29716ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.473723] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94b1e96-dd18-4d01-a426-73e2b2dc99f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.489557] env[61974]: DEBUG nova.compute.provider_tree [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.588491] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 835.904439] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.992705] env[61974]: DEBUG nova.scheduler.client.report [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.253919] env[61974]: DEBUG nova.compute.manager [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Received event network-changed-38cb1ea8-4503-4040-9549-73260b27b3ac {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.253919] env[61974]: DEBUG nova.compute.manager [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Refreshing instance network info cache due to event network-changed-38cb1ea8-4503-4040-9549-73260b27b3ac. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 836.254065] env[61974]: DEBUG oslo_concurrency.lockutils [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] Acquiring lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.254106] env[61974]: DEBUG oslo_concurrency.lockutils [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] Acquired lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.254257] env[61974]: DEBUG nova.network.neutron [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Refreshing network info cache for port 38cb1ea8-4503-4040-9549-73260b27b3ac {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.401886] env[61974]: ERROR nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. 
[ 836.401886] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.401886] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.401886] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.401886] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.401886] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.401886] env[61974]: ERROR nova.compute.manager raise self.value [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.401886] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 836.401886] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.401886] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 836.402609] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.402609] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 836.402609] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. 
[ 836.402609] env[61974]: ERROR nova.compute.manager [ 836.402609] env[61974]: Traceback (most recent call last): [ 836.402609] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 836.402609] env[61974]: listener.cb(fileno) [ 836.402609] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.402609] env[61974]: result = function(*args, **kwargs) [ 836.402609] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.402609] env[61974]: return func(*args, **kwargs) [ 836.402609] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 836.402609] env[61974]: raise e [ 836.402609] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.402609] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 836.402609] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.402609] env[61974]: created_port_ids = self._update_ports_for_instance( [ 836.402609] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.402609] env[61974]: with excutils.save_and_reraise_exception(): [ 836.402609] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.402609] env[61974]: self.force_reraise() [ 836.402609] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.402609] env[61974]: raise self.value [ 836.402609] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.402609] env[61974]: updated_port = self._update_port( [ 836.402609] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.402609] env[61974]: _ensure_no_port_binding_failure(port) [ 836.402609] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.402609] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 836.403464] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. [ 836.403464] env[61974]: Removing descriptor: 20 [ 836.498276] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.923s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.498929] env[61974]: ERROR nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. 
[ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Traceback (most recent call last): [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.driver.spawn(context, instance, image_meta, [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] vm_ref = self.build_virtual_machine(instance, [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.498929] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] for vif in network_info: [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self._sync_wrapper(fn, *args, **kwargs) [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.wait() [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self[:] = self._gt.wait() [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self._exit_event.wait() [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] result = hub.switch() [ 836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
836.499479] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return self.greenlet.switch() [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] result = function(*args, **kwargs) [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] return func(*args, **kwargs) [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise e [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] nwinfo = self.network_api.allocate_for_instance( [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] created_port_ids = self._update_ports_for_instance( [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] with excutils.save_and_reraise_exception(): [ 836.499863] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] self.force_reraise() [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise self.value [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] updated_port = self._update_port( [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] _ensure_no_port_binding_failure(port) [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] raise exception.PortBindingFailed(port_id=port['id']) [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] nova.exception.PortBindingFailed: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. [ 836.500423] env[61974]: ERROR nova.compute.manager [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] [ 836.500732] env[61974]: DEBUG nova.compute.utils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 836.500806] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.093s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.502539] env[61974]: DEBUG nova.objects.instance [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lazy-loading 'resources' on Instance uuid f6b76518-d691-4e4f-861a-624a1684e564 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.502830] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Build of instance b935b7e2-ba4b-452a-9eca-2fad5acc9055 was re-scheduled: Binding failed for port 17cefec7-e4b4-4279-9a97-d86484b6c3b2, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 836.503296] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 836.503595] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquiring lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.503764] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Acquired lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.503953] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.598434] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 836.654911] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.655224] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.655396] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.655607] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.655764] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.655911] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.656130] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.656288] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.656452] env[61974]: DEBUG nova.virt.hardware [None 
req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.656617] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.656786] env[61974]: DEBUG nova.virt.hardware [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.657736] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73485600-1cbd-4fb8-af7a-5e9ca2dfba2c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.670539] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f30c5d3-0d8f-45ae-a64a-3f88832d9ee9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.685202] env[61974]: ERROR nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. 
[ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Traceback (most recent call last): [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] yield resources [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.driver.spawn(context, instance, image_meta, [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] vm_ref = self.build_virtual_machine(instance, [ 836.685202] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] for vif in network_info: [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return self._sync_wrapper(fn, *args, **kwargs) [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.wait() [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self[:] = self._gt.wait() [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return self._exit_event.wait() [ 836.686838] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 836.686838] env[61974]: ERROR 
nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] current.throw(*self._exc) [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] result = function(*args, **kwargs) [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return func(*args, **kwargs) [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise e [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] nwinfo = self.network_api.allocate_for_instance( [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] created_port_ids = self._update_ports_for_instance( [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] with excutils.save_and_reraise_exception(): [ 836.687526] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.force_reraise() [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise self.value [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] updated_port = self._update_port( [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] _ensure_no_port_binding_failure(port) [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise exception.PortBindingFailed(port_id=port['id']) [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. [ 836.688022] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] [ 836.688022] env[61974]: INFO nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Terminating instance [ 836.689375] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.774318] env[61974]: DEBUG nova.network.neutron [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.847793] env[61974]: DEBUG nova.network.neutron [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.031373] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.152474] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.352039] env[61974]: DEBUG oslo_concurrency.lockutils [req-eaf9e937-1f36-48fc-a4bb-943f5e26a3a1 req-58a27792-82f6-4df1-a9d3-f05800db832f service nova] Releasing lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.353389] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquired lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.353580] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.355293] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66ae3ac-8ba5-42a2-94c9-b379e57a6346 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.365328] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97da46e1-5c4f-4c49-b412-fd7e92749ca5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.395363] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80effb80-dd6a-4593-adf8-12724edeb138 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.403104] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a972a810-1d0f-40dd-afa2-9112fe87e553 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.416705] env[61974]: DEBUG nova.compute.provider_tree [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.650401] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Releasing lock "refresh_cache-b935b7e2-ba4b-452a-9eca-2fad5acc9055" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.650401] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e 
tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 837.650960] env[61974]: DEBUG nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.650960] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.666583] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.877340] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.921321] env[61974]: DEBUG nova.scheduler.client.report [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 838.022663] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.169729] env[61974]: DEBUG nova.network.neutron [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.330842] env[61974]: DEBUG nova.compute.manager [req-89d42a20-a511-48f9-a739-75ff28551ff3 req-e80824ec-fbbb-4449-95f1-c8ad8bc60a60 service nova] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Received event 
network-vif-deleted-38cb1ea8-4503-4040-9549-73260b27b3ac {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.426927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.926s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.429192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.985s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.455459] env[61974]: INFO nova.scheduler.client.report [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Deleted allocations for instance f6b76518-d691-4e4f-861a-624a1684e564 [ 838.525033] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Releasing lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.525464] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 838.525651] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.525933] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e1e70e4-fdc6-493d-bcd6-2eb8c02a0c65 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.534992] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ad5ceb-3853-4d1a-bf24-2ebdd034ae6c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.559311] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0de509f4-48d8-43ae-9551-80ae414d7c8e could not be found. 
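Editor's note: the WARNING just above ("Instance does not exist on backend: nova.exception.InstanceNotFound") is not treated as a failure; the following records report "Instance destroyed" and a 0.03 second teardown. A short sketch of that tolerate-missing-VM pattern, with a hypothetical vmops object and helper names standing in for the real driver code:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(vmops, instance_uuid, log):
        # Pattern suggested by the log: the VM was never created on the
        # vCenter side (the port binding failed before spawn completed),
        # so the driver logs a warning and continues the teardown.
        try:
            vm_ref = vmops.find_vm_by_uuid(instance_uuid)   # hypothetical helper
            vmops.power_off_and_delete(vm_ref)              # hypothetical helper
        except InstanceNotFound as exc:
            log.warning("Instance does not exist on backend: %s", exc)
        log.debug("Instance destroyed")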
[ 838.559536] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.559718] env[61974]: INFO nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 838.559967] env[61974]: DEBUG oslo.service.loopingcall [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.560209] env[61974]: DEBUG nova.compute.manager [-] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 838.560303] env[61974]: DEBUG nova.network.neutron [-] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.579387] env[61974]: DEBUG nova.network.neutron [-] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.672194] env[61974]: INFO nova.compute.manager [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] [instance: b935b7e2-ba4b-452a-9eca-2fad5acc9055] Took 1.02 seconds to deallocate network for instance. 
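Editor's note: the oslo.service.loopingcall record above waits for a function named _deallocate_network_with_retries, i.e. the Neutron deallocation is wrapped in a retry helper so a transient failure does not leak ports. The loop below only conveys that idea under assumed parameters; it is not oslo.service's API or Nova's actual helper.

    import time

    def deallocate_network_with_retries(deallocate, instance_uuid,
                                        attempts=3, delay=1.0):
        # Illustrative only: retry the deallocation a few times with a fixed
        # delay, re-raising the last error once the attempts are exhausted.
        for attempt in range(1, attempts + 1):
            try:
                return deallocate(instance_uuid)
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)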
[ 838.963630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c07e4907-418d-4322-bd8c-c693f0c24e13 tempest-ServerShowV254Test-705301508 tempest-ServerShowV254Test-705301508-project-member] Lock "f6b76518-d691-4e4f-861a-624a1684e564" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.312s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.082774] env[61974]: DEBUG nova.network.neutron [-] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.309181] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6693497-a99e-4697-baaf-857cf45868b6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.324292] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a319c726-ae32-43a8-bff0-3befb36fabcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.363049] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7848c66c-7616-49ea-aa42-2e98b63fc912 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.375578] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08727e9-5c7e-44d1-aba8-c0fac24e18aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.389910] env[61974]: DEBUG nova.compute.provider_tree [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.585007] env[61974]: INFO nova.compute.manager [-] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Took 1.02 seconds to deallocate network for instance. 
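Editor's note: the inventory dicts reported at 834.066129, 835.992705 and 839.895872 all describe the same provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a. Placement derives usable capacity from such records as (total - reserved) * allocation_ratio, which is why this host can accept far more vCPUs than it has physical cores. A quick check of the logged numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0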
[ 839.587379] env[61974]: DEBUG nova.compute.claims [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 839.587603] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.713457] env[61974]: INFO nova.scheduler.client.report [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Deleted allocations for instance b935b7e2-ba4b-452a-9eca-2fad5acc9055 [ 839.895872] env[61974]: DEBUG nova.scheduler.client.report [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.220741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8b9848e3-7d1b-4078-a8a1-d18285a5a33e tempest-AttachVolumeShelveTestJSON-180222380 tempest-AttachVolumeShelveTestJSON-180222380-project-member] Lock "b935b7e2-ba4b-452a-9eca-2fad5acc9055" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.966s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.400808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.971s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.401475] env[61974]: ERROR nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. 
[ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Traceback (most recent call last): [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.driver.spawn(context, instance, image_meta, [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] vm_ref = self.build_virtual_machine(instance, [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.401475] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] for vif in network_info: [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self._sync_wrapper(fn, *args, **kwargs) [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.wait() [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self[:] = self._gt.wait() [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self._exit_event.wait() [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] result = hub.switch() [ 840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
840.401798] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return self.greenlet.switch() [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] result = function(*args, **kwargs) [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] return func(*args, **kwargs) [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise e [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] nwinfo = self.network_api.allocate_for_instance( [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] created_port_ids = self._update_ports_for_instance( [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] with excutils.save_and_reraise_exception(): [ 840.402071] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] self.force_reraise() [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise self.value [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] updated_port = self._update_port( [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] _ensure_no_port_binding_failure(port) [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] raise exception.PortBindingFailed(port_id=port['id']) [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] nova.exception.PortBindingFailed: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. [ 840.402353] env[61974]: ERROR nova.compute.manager [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] [ 840.402655] env[61974]: DEBUG nova.compute.utils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 840.404845] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Build of instance 4628f895-1ae5-4d25-8095-f892b86769f5 was re-scheduled: Binding failed for port fc4573ce-7909-401d-89b0-9dadf3f5e629, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 840.404845] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 840.404845] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.405034] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.405074] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.407993] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.881s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.723605] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 
tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 840.938765] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.092476] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.226068] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c581ea21-385d-473e-b050-912ed10cd871 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.243837] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63274ab-b85a-4842-914b-bbfb5b122d1e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.248513] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.278351] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677a92a5-f70f-4f3a-9164-b47ed9ae523d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.286202] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73988ac5-b26f-471d-a165-355615f37331 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.299573] env[61974]: DEBUG nova.compute.provider_tree [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.597219] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-4628f895-1ae5-4d25-8095-f892b86769f5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.597465] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 841.597676] env[61974]: DEBUG nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.597848] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.615979] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.803145] env[61974]: DEBUG nova.scheduler.client.report [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.012143] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.012502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.118641] env[61974]: DEBUG nova.network.neutron [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.311073] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.902s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.311073] env[61974]: ERROR nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Traceback (most recent call last): [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.driver.spawn(context, instance, image_meta, [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 842.311073] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] vm_ref = self.build_virtual_machine(instance, [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] vif_infos = vmwarevif.get_vif_info(self._session, [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] for vif in network_info: [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return self._sync_wrapper(fn, *args, **kwargs) [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.wait() [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self[:] = self._gt.wait() [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 842.311285] env[61974]: ERROR 
nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return self._exit_event.wait() [ 842.311285] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] current.throw(*self._exc) [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] result = function(*args, **kwargs) [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] return func(*args, **kwargs) [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise e [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] nwinfo = self.network_api.allocate_for_instance( [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] created_port_ids = self._update_ports_for_instance( [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 842.311555] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] with excutils.save_and_reraise_exception(): [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] self.force_reraise() [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise self.value [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] updated_port = self._update_port( [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.311809] 
env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] _ensure_no_port_binding_failure(port) [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] raise exception.PortBindingFailed(port_id=port['id']) [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] nova.exception.PortBindingFailed: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. [ 842.311809] env[61974]: ERROR nova.compute.manager [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] [ 842.312066] env[61974]: DEBUG nova.compute.utils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 842.312066] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.991s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.313763] env[61974]: INFO nova.compute.claims [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.316875] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Build of instance 90fd5720-923c-4243-9f62-908e35fe35a6 was re-scheduled: Binding failed for port 034b3dca-49d9-4087-8e57-f11ab62a950b, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 842.317477] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 842.317842] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquiring lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.318150] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Acquired lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.318433] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.622724] env[61974]: INFO nova.compute.manager [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 4628f895-1ae5-4d25-8095-f892b86769f5] Took 1.02 seconds to deallocate network for instance. [ 842.840173] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.919074] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.421813] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Releasing lock "refresh_cache-90fd5720-923c-4243-9f62-908e35fe35a6" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.422088] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 843.422330] env[61974]: DEBUG nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.422533] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.437429] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.592243] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e579e373-6bbb-4c24-9a4b-024a55d5b016 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.600736] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f560790a-b974-4613-80a2-bb3adc2518a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.636826] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279f14a5-d036-4f46-81d4-b61a233fe9e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.645393] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b339167-ad70-4cb4-8ed2-b153bcf6c215 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.659236] env[61974]: DEBUG nova.compute.provider_tree [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.660905] env[61974]: INFO nova.scheduler.client.report [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance 4628f895-1ae5-4d25-8095-f892b86769f5 [ 843.940394] env[61974]: DEBUG nova.network.neutron [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.168164] env[61974]: DEBUG nova.scheduler.client.report [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] 
Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.171916] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a82e4073-6336-4732-a28f-e507aff8c4d9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "4628f895-1ae5-4d25-8095-f892b86769f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.426s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.443125] env[61974]: INFO nova.compute.manager [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] [instance: 90fd5720-923c-4243-9f62-908e35fe35a6] Took 1.02 seconds to deallocate network for instance. [ 844.673508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.674041] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.677377] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 844.680733] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.695s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.682065] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.682065] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 844.682065] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.093s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.685243] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac1dd4d-2a35-455c-88a1-9512fb724bcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.693840] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7322e3-2357-44eb-9d4c-b35c476cf302 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.709987] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93300fe4-13ba-4b69-9369-7acea4acf6c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.718020] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c9501f-18f2-4a3b-af70-2e9d02a46761 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.753016] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181354MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 844.753303] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.013021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] 
Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.013021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.179251] env[61974]: DEBUG nova.compute.utils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.180645] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.180831] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.206164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.386790] env[61974]: DEBUG nova.policy [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.471593] env[61974]: INFO nova.scheduler.client.report [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Deleted allocations for instance 90fd5720-923c-4243-9f62-908e35fe35a6 [ 845.503502] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0242d427-e9d7-4ce7-820e-2f1a30f4378b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.512180] env[61974]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadab3a2-ee77-4c66-8564-43c4433acb38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.546390] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bf5f08-464f-4ba4-8769-1a6ff3aca42a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.553663] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caee1c8f-7c97-4044-a4a9-fb28cd0d25d9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.566574] env[61974]: DEBUG nova.compute.provider_tree [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.687914] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.797449] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Successfully created port: f1ba04af-31ec-4ecc-8250-9b2f424023ce {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.983478] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4783dae3-d22d-4c36-84de-0af6500e0869 tempest-VolumesAdminNegativeTest-629101252 tempest-VolumesAdminNegativeTest-629101252-project-member] Lock "90fd5720-923c-4243-9f62-908e35fe35a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.371s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.071458] env[61974]: DEBUG nova.scheduler.client.report [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.486492] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 846.582025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.582858] env[61974]: ERROR nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Traceback (most recent call last): [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.driver.spawn(context, instance, image_meta, [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] vm_ref = self.build_virtual_machine(instance, [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.582858] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] for vif in network_info: [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return self._sync_wrapper(fn, *args, **kwargs) [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.wait() [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 846.583617] env[61974]: 
ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self[:] = self._gt.wait() [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return self._exit_event.wait() [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] current.throw(*self._exc) [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 846.583617] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] result = function(*args, **kwargs) [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] return func(*args, **kwargs) [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise e [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] nwinfo = self.network_api.allocate_for_instance( [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] created_port_ids = self._update_ports_for_instance( [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] with excutils.save_and_reraise_exception(): [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] self.force_reraise() [ 846.585286] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise self.value [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] updated_port = self._update_port( [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] _ensure_no_port_binding_failure(port) [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] raise exception.PortBindingFailed(port_id=port['id']) [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] nova.exception.PortBindingFailed: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. [ 846.586114] env[61974]: ERROR nova.compute.manager [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] [ 846.586114] env[61974]: DEBUG nova.compute.utils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 846.586946] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.373s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.588030] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Build of instance e6bc38d5-056f-40c2-a2ed-467200da2738 was re-scheduled: Binding failed for port 8e91115d-ca19-4fc0-b008-94f029613285, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 846.588751] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 846.589030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquiring lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.589190] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Acquired lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.589476] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.701626] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.741522] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 846.741759] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.741914] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 846.742105] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.742254] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 846.742398] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 846.742599] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 846.745898] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 846.746198] 
env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 846.747063] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 846.747063] env[61974]: DEBUG nova.virt.hardware [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 846.747636] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440097cf-acbf-4d37-aa6a-d903a74474bf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.757130] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2203d0b-6f75-4616-9d0f-8294d058c7b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.012115] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.122127] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.254141] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.361043] env[61974]: DEBUG nova.compute.manager [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Received event network-changed-f1ba04af-31ec-4ecc-8250-9b2f424023ce {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.361043] env[61974]: DEBUG nova.compute.manager [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Refreshing instance network info cache due to event network-changed-f1ba04af-31ec-4ecc-8250-9b2f424023ce. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.361043] env[61974]: DEBUG oslo_concurrency.lockutils [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] Acquiring lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.361043] env[61974]: DEBUG oslo_concurrency.lockutils [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] Acquired lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.361298] env[61974]: DEBUG nova.network.neutron [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Refreshing network info cache for port f1ba04af-31ec-4ecc-8250-9b2f424023ce {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.431491] env[61974]: ERROR nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 847.431491] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.431491] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 847.431491] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 847.431491] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.431491] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.431491] env[61974]: ERROR nova.compute.manager raise self.value [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 847.431491] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 847.431491] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.431491] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 847.431872] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.431872] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 847.431872] env[61974]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 847.431872] env[61974]: ERROR nova.compute.manager [ 847.431872] env[61974]: Traceback (most recent call last): [ 847.431872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 847.431872] env[61974]: listener.cb(fileno) [ 847.431872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.431872] env[61974]: result = function(*args, **kwargs) [ 847.431872] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 847.431872] env[61974]: return func(*args, **kwargs) [ 847.431872] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 847.431872] env[61974]: raise e [ 847.431872] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.431872] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 847.431872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 847.431872] env[61974]: created_port_ids = self._update_ports_for_instance( [ 847.431872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 847.431872] env[61974]: with excutils.save_and_reraise_exception(): [ 847.431872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.431872] env[61974]: self.force_reraise() [ 847.431872] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.431872] env[61974]: raise self.value [ 847.431872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 847.431872] env[61974]: updated_port = self._update_port( [ 847.431872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.431872] env[61974]: _ensure_no_port_binding_failure(port) [ 847.431872] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.431872] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 847.432468] env[61974]: nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 847.432468] env[61974]: Removing descriptor: 20 [ 847.432468] env[61974]: ERROR nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. 
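Every failed build in this section bottoms out in the same check: nova/network/neutron.py line 294, _ensure_no_port_binding_failure, raising PortBindingFailed. As a minimal, self-contained sketch (not the actual Nova source) of what such a check amounts to, assuming the standard Neutron port dict in which a failed binding is reported as binding:vif_type == 'binding_failed':

    # Hypothetical sketch of the check behind the PortBindingFailed tracebacks above.
    # Assumption: `port` is the dict returned by the Neutron API, and Neutron marks
    # an unbindable port with binding:vif_type == 'binding_failed'.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        # Neutron keeps the port but flags the binding as failed; turning that flag
        # into an exception is what aborts the spawn and triggers the re-schedule.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'f1ba04af-31ec-4ecc-8250-9b2f424023ce',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port f1ba04af-..., please check neutron logs ...
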
[ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Traceback (most recent call last): [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] yield resources [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.driver.spawn(context, instance, image_meta, [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 847.432468] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] vm_ref = self.build_virtual_machine(instance, [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] vif_infos = vmwarevif.get_vif_info(self._session, [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] for vif in network_info: [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self._sync_wrapper(fn, *args, **kwargs) [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.wait() [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self[:] = self._gt.wait() [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self._exit_event.wait() [ 847.432815] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 847.433111] env[61974]: ERROR 
nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] result = hub.switch() [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self.greenlet.switch() [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] result = function(*args, **kwargs) [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return func(*args, **kwargs) [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise e [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] nwinfo = self.network_api.allocate_for_instance( [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 847.433111] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] created_port_ids = self._update_ports_for_instance( [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] with excutils.save_and_reraise_exception(): [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.force_reraise() [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise self.value [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] updated_port = self._update_port( [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.433437] 
env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] _ensure_no_port_binding_failure(port) [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.433437] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise exception.PortBindingFailed(port_id=port['id']) [ 847.433729] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 847.433729] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] [ 847.433729] env[61974]: INFO nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Terminating instance [ 847.434733] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.445320] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dfdcdd-e1e3-4f5c-acf4-ea8787a93529 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.453624] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609e60ae-b340-4e3b-893c-5d7864817e45 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.238032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Releasing lock "refresh_cache-e6bc38d5-056f-40c2-a2ed-467200da2738" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.238032] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 848.238032] env[61974]: DEBUG nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.238032] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.243323] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a6dddd-aa8f-4a8e-83a7-afb3f14f6f5f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.247303] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.247303] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.253703] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ff5a33-2d8f-4d91-98b5-a47fb669aa21 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.268163] env[61974]: DEBUG nova.compute.provider_tree [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.270980] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.274915] env[61974]: DEBUG nova.network.neutron [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.361204] env[61974]: DEBUG nova.network.neutron [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.771740] env[61974]: DEBUG nova.scheduler.client.report [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.774871] env[61974]: DEBUG nova.network.neutron [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.863630] env[61974]: DEBUG oslo_concurrency.lockutils [req-57b0ff5b-9bd3-4432-b72f-8617488b7e2e req-b89c5df8-42a7-451d-8570-34712b1fd888 service nova] Releasing lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.864653] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.864653] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.277112] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.692s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.277803] env[61974]: ERROR nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. 
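Each of these tracebacks funnels through oslo_utils.excutils.save_and_reraise_exception(), which is why the __exit__, force_reraise and "raise self.value" frames keep appearing between the Neutron call and the PortBindingFailed. A much-simplified stand-in (not the oslo_utils implementation, which also logs and lets callers suppress the re-raise) behaves roughly like this:

    # Simplified stand-in for the save_and_reraise_exception() context manager seen
    # in the tracebacks; it captures the in-flight exception so cleanup can run,
    # then re-raises it, which is where the "raise self.value" frame comes from.
    class save_and_reraise_exception:
        def __init__(self):
            self.value = None      # exception captured when the block fails
            self.reraise = True    # cleanup code may set this to False to swallow it

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.value = exc_val
            if self.reraise and exc_val is not None:
                raise self.value
            return True            # no exception, or caller chose not to re-raise

    # Usage loosely mirroring _update_ports_for_instance: roll back, then re-raise.
    def update_port_or_rollback(port_id):
        try:
            with save_and_reraise_exception():
                raise RuntimeError(f"simulated update failure for port {port_id}")
        except RuntimeError as exc:
            print("re-raised after rollback:", exc)

    update_port_or_rollback('8913d868-44e2-4e14-8c71-b347d29be757')
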
[ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Traceback (most recent call last): [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.driver.spawn(context, instance, image_meta, [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] vm_ref = self.build_virtual_machine(instance, [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] vif_infos = vmwarevif.get_vif_info(self._session, [ 849.277803] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] for vif in network_info: [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self._sync_wrapper(fn, *args, **kwargs) [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.wait() [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self[:] = self._gt.wait() [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self._exit_event.wait() [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] result = hub.switch() [ 849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
849.278103] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return self.greenlet.switch() [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] result = function(*args, **kwargs) [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] return func(*args, **kwargs) [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise e [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] nwinfo = self.network_api.allocate_for_instance( [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] created_port_ids = self._update_ports_for_instance( [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] with excutils.save_and_reraise_exception(): [ 849.278369] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] self.force_reraise() [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise self.value [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] updated_port = self._update_port( [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] _ensure_no_port_binding_failure(port) [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] raise exception.PortBindingFailed(port_id=port['id']) [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] nova.exception.PortBindingFailed: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. [ 849.278711] env[61974]: ERROR nova.compute.manager [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] [ 849.278956] env[61974]: DEBUG nova.compute.utils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 849.280367] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.735s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.283394] env[61974]: INFO nova.compute.manager [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] [instance: e6bc38d5-056f-40c2-a2ed-467200da2738] Took 1.05 seconds to deallocate network for instance. [ 849.285797] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Build of instance 2601b97a-8ef6-4b61-b0e0-dd6c7c203206 was re-scheduled: Binding failed for port 8913d868-44e2-4e14-8c71-b347d29be757, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 849.286256] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 849.287095] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.287095] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.287095] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.382719] env[61974]: DEBUG nova.compute.manager [req-129674ca-5116-4c03-993c-89fc4ff3e3c1 req-cfe4e554-fca6-4d21-ade7-23625636c695 service nova] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Received event network-vif-deleted-f1ba04af-31ec-4ecc-8250-9b2f424023ce {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.394537] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.544555] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.812759] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.904726] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.048293] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.048718] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.048913] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.049542] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a592aae-e432-45d1-abb9-2fc2550be912 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.061589] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7969414b-70a4-4227-91a2-59751578dab4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.088523] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f62ae7a-126f-42ce-9579-57ca02c871d8 could not be found. [ 850.088750] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.088933] env[61974]: INFO nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 850.089197] env[61974]: DEBUG oslo.service.loopingcall [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.091675] env[61974]: DEBUG nova.compute.manager [-] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.091835] env[61974]: DEBUG nova.network.neutron [-] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.110162] env[61974]: DEBUG nova.network.neutron [-] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.180615] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217b9f92-530e-43be-9f28-8af66cf27d2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.188101] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effcb64b-b1d6-4425-b2db-59fbff59304a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.216883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537fd97d-4006-4bf0-8cc3-6bb83542b785 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.224378] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297b0299-e86d-4a47-8f39-dba8f27a1a07 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.236933] env[61974]: DEBUG nova.compute.provider_tree [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.317433] env[61974]: INFO nova.scheduler.client.report [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Deleted allocations for instance e6bc38d5-056f-40c2-a2ed-467200da2738 [ 850.409140] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Releasing lock "refresh_cache-2601b97a-8ef6-4b61-b0e0-dd6c7c203206" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.409599] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 850.409726] env[61974]: DEBUG nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.409899] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.425955] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.613682] env[61974]: DEBUG nova.network.neutron [-] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.741081] env[61974]: DEBUG nova.scheduler.client.report [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.827777] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d981171-dcae-4e57-8e75-aa62ace71e43 tempest-ServersTestBootFromVolume-1795056136 tempest-ServersTestBootFromVolume-1795056136-project-member] Lock "e6bc38d5-056f-40c2-a2ed-467200da2738" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 157.103s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.927853] env[61974]: DEBUG nova.network.neutron [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.116273] env[61974]: INFO nova.compute.manager [-] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Took 1.02 seconds to deallocate network for instance. 
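For reference, the inventory payload logged above for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a translates into schedulable capacity as follows, assuming standard Placement semantics (capacity = (total - reserved) * allocation_ratio, with max_unit capping a single allocation). This is an illustrative calculation, not output from the report client:

    # Rough illustration of what the logged inventory means for scheduling,
    # under the assumption of standard Placement capacity semantics.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 178,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity {capacity:g}, per-instance max {inv['max_unit']}")
    # VCPU: schedulable capacity 192, per-instance max 16
    # MEMORY_MB: schedulable capacity 196078, per-instance max 65530
    # DISK_GB: schedulable capacity 400, per-instance max 178
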
[ 851.119173] env[61974]: DEBUG nova.compute.claims [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 851.119644] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.246650] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.967s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.247504] env[61974]: ERROR nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] Traceback (most recent call last): [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.driver.spawn(context, instance, image_meta, [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] vm_ref = self.build_virtual_machine(instance, [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] vif_infos = vmwarevif.get_vif_info(self._session, [ 851.247504] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] for vif in network_info: [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File 
"/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return self._sync_wrapper(fn, *args, **kwargs) [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.wait() [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self[:] = self._gt.wait() [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return self._exit_event.wait() [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] current.throw(*self._exc) [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 851.248151] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] result = function(*args, **kwargs) [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] return func(*args, **kwargs) [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise e [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] nwinfo = self.network_api.allocate_for_instance( [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] created_port_ids = self._update_ports_for_instance( [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] with excutils.save_and_reraise_exception(): [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] self.force_reraise() [ 851.248743] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise self.value [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] updated_port = self._update_port( [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] _ensure_no_port_binding_failure(port) [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] raise exception.PortBindingFailed(port_id=port['id']) [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] nova.exception.PortBindingFailed: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. [ 851.249081] env[61974]: ERROR nova.compute.manager [instance: f290da20-8a42-42f5-8902-136e434d29d0] [ 851.249081] env[61974]: DEBUG nova.compute.utils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 851.249498] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.561s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.252518] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Build of instance f290da20-8a42-42f5-8902-136e434d29d0 was re-scheduled: Binding failed for port 9bb23585-3e69-475a-b54a-f45ef2fcbb1c, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 851.252951] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 851.253201] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquiring lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.253347] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Acquired lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.253504] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.331623] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.433689] env[61974]: INFO nova.compute.manager [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 2601b97a-8ef6-4b61-b0e0-dd6c7c203206] Took 1.02 seconds to deallocate network for instance. [ 851.779854] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.853999] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.946623] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.062576] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26165cc-21d9-4dd9-9a54-73b5d2240f79 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.070338] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacc09a1-48e8-404c-823e-e9cd2c93551b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.108161] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89a840c-fa5e-4061-aa41-519875bd9375 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.116218] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69785421-543d-4c12-9584-8c7155459120 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.130309] env[61974]: DEBUG nova.compute.provider_tree [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.454587] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Releasing lock "refresh_cache-f290da20-8a42-42f5-8902-136e434d29d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.454587] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 852.454587] env[61974]: DEBUG nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 852.454587] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 852.471488] env[61974]: INFO nova.scheduler.client.report [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Deleted allocations for instance 2601b97a-8ef6-4b61-b0e0-dd6c7c203206 [ 852.478389] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.632133] env[61974]: DEBUG nova.scheduler.client.report [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.983084] env[61974]: DEBUG nova.network.neutron [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.985169] env[61974]: DEBUG oslo_concurrency.lockutils [None req-693aaa44-df94-4733-8faf-df9c95841f77 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "2601b97a-8ef6-4b61-b0e0-dd6c7c203206" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.870s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.139941] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.890s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.140495] env[61974]: ERROR nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Traceback (most recent call last): [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.driver.spawn(context, instance, image_meta, [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] vm_ref = self.build_virtual_machine(instance, [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.140495] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] for vif in network_info: [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return self._sync_wrapper(fn, *args, **kwargs) [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.wait() [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self[:] = self._gt.wait() [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return self._exit_event.wait() [ 
853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] current.throw(*self._exc) [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.140838] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] result = function(*args, **kwargs) [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] return func(*args, **kwargs) [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise e [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] nwinfo = self.network_api.allocate_for_instance( [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] created_port_ids = self._update_ports_for_instance( [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] with excutils.save_and_reraise_exception(): [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] self.force_reraise() [ 853.141258] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise self.value [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] updated_port = self._update_port( [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] 
_ensure_no_port_binding_failure(port) [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] raise exception.PortBindingFailed(port_id=port['id']) [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] nova.exception.PortBindingFailed: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. [ 853.141612] env[61974]: ERROR nova.compute.manager [instance: 242d6159-5223-4815-900c-4c1285c7a90c] [ 853.141612] env[61974]: DEBUG nova.compute.utils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 853.143349] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Build of instance 242d6159-5223-4815-900c-4c1285c7a90c was re-scheduled: Binding failed for port ecd631e0-9af4-4d2f-89b3-f9f60860a9ae, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 853.143349] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 853.143528] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquiring lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.143675] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Acquired lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.143948] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.144982] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 
17.241s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.147772] env[61974]: INFO nova.compute.claims [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.488056] env[61974]: INFO nova.compute.manager [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] [instance: f290da20-8a42-42f5-8902-136e434d29d0] Took 1.04 seconds to deallocate network for instance. [ 853.490935] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 853.672142] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.766417] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.021735] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.270610] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Releasing lock "refresh_cache-242d6159-5223-4815-900c-4c1285c7a90c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.270915] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 854.271121] env[61974]: DEBUG nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 854.271293] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.288147] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 854.428987] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89599591-d984-42f2-8bb7-61563c3d5b3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.437799] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc9e5d1-53a0-48b7-927d-bcdc1f01d367 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.471821] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ab320a-74d8-4060-8eec-40fc61a3a65d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.479469] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b78138f-59ea-40e6-b6d3-c59526546cfa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.493874] env[61974]: DEBUG nova.compute.provider_tree [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.523169] env[61974]: INFO nova.scheduler.client.report [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Deleted allocations for instance f290da20-8a42-42f5-8902-136e434d29d0 [ 854.791563] env[61974]: DEBUG nova.network.neutron [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.841082] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 
tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.841167] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.998528] env[61974]: DEBUG nova.scheduler.client.report [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.031831] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be3fefcf-6079-43f1-a30b-b10949513576 tempest-ServerRescueTestJSONUnderV235-58894480 tempest-ServerRescueTestJSONUnderV235-58894480-project-member] Lock "f290da20-8a42-42f5-8902-136e434d29d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.043s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.294454] env[61974]: INFO nova.compute.manager [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] [instance: 242d6159-5223-4815-900c-4c1285c7a90c] Took 1.02 seconds to deallocate network for instance. [ 855.509020] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.509832] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.514057] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.925s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.533869] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 856.018289] env[61974]: DEBUG nova.compute.utils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.019765] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 856.019942] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.070237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.075061] env[61974]: DEBUG nova.policy [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a6ad6cd89d444e0aa6fd7b403f04e87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7efa3e69e9544ce9a805b4c9bc839be6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 856.329058] env[61974]: INFO nova.scheduler.client.report [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Deleted allocations for instance 242d6159-5223-4815-900c-4c1285c7a90c [ 856.356970] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da38d7eb-d95f-4fc8-a695-4a9b92178479 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.367213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2834e1-8e7b-4834-a744-f93e24a7d71a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.404363] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fc6ac0-e452-4c61-8786-a67efd5bc55e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.413646] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d91a74-5e1d-46dc-988c-65215bf6d21d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.428460] env[61974]: DEBUG nova.compute.provider_tree [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.433579] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.433878] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.461054] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Successfully created port: 5fd1f426-dc0b-4645-af65-d7b50ec60cfb {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.524092] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.688447] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "097ad079-9712-4183-9135-b15ad3a65d6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.690127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.843810] env[61974]: DEBUG oslo_concurrency.lockutils [None req-433eafdf-7162-4709-8516-33bee6467097 tempest-ServerActionsV293TestJSON-1858162193 tempest-ServerActionsV293TestJSON-1858162193-project-member] Lock "242d6159-5223-4815-900c-4c1285c7a90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.387s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.887179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.887444] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.934756] env[61974]: DEBUG nova.scheduler.client.report [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.350426] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 857.442024] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.927s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.442024] env[61974]: ERROR nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Traceback (most recent call last): [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.driver.spawn(context, instance, image_meta, [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.442024] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] vm_ref = self.build_virtual_machine(instance, [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] for vif in network_info: [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return self._sync_wrapper(fn, *args, **kwargs) [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.wait() [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 857.442409] env[61974]: ERROR 
nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self[:] = self._gt.wait() [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return self._exit_event.wait() [ 857.442409] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] current.throw(*self._exc) [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] result = function(*args, **kwargs) [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] return func(*args, **kwargs) [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise e [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] nwinfo = self.network_api.allocate_for_instance( [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] created_port_ids = self._update_ports_for_instance( [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 857.442780] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] with excutils.save_and_reraise_exception(): [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] self.force_reraise() [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise self.value [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] updated_port = self._update_port( [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] _ensure_no_port_binding_failure(port) [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] raise exception.PortBindingFailed(port_id=port['id']) [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] nova.exception.PortBindingFailed: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. [ 857.443113] env[61974]: ERROR nova.compute.manager [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] [ 857.443528] env[61974]: DEBUG nova.compute.utils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.447425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.199s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.449707] env[61974]: INFO nova.compute.claims [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.452302] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Build of instance 0de509f4-48d8-43ae-9551-80ae414d7c8e was re-scheduled: Binding failed for port 38cb1ea8-4503-4040-9549-73260b27b3ac, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 857.452747] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 857.452977] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquiring lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.453158] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Acquired lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.453320] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.534779] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.568179] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.568792] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.569123] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.569457] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.572606] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.572606] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.572606] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 857.572606] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.572606] env[61974]: DEBUG 
nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.572800] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.572800] env[61974]: DEBUG nova.virt.hardware [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.572800] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddf7303-cd49-447d-bd58-8739032cbb90 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.583586] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7abc78-b353-4be5-b7f8-1958a3c70230 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.635846] env[61974]: DEBUG nova.compute.manager [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Received event network-changed-5fd1f426-dc0b-4645-af65-d7b50ec60cfb {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.636210] env[61974]: DEBUG nova.compute.manager [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Refreshing instance network info cache due to event network-changed-5fd1f426-dc0b-4645-af65-d7b50ec60cfb. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 857.636549] env[61974]: DEBUG oslo_concurrency.lockutils [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] Acquiring lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.636683] env[61974]: DEBUG oslo_concurrency.lockutils [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] Acquired lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.636893] env[61974]: DEBUG nova.network.neutron [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Refreshing network info cache for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.876968] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.912399] env[61974]: ERROR nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. 
[ 857.912399] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.912399] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 857.912399] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 857.912399] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.912399] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.912399] env[61974]: ERROR nova.compute.manager raise self.value [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 857.912399] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 857.912399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.912399] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 857.912789] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.912789] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 857.912789] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. 
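The PortBindingFailed frames above all end in _ensure_no_port_binding_failure raising exception.PortBindingFailed(port_id=port['id']). A minimal stand-alone sketch of that check, assuming the usual Neutron convention that a failed binding is reported on the port as binding:vif_type = 'binding_failed' (the class and function below are illustrative stand-ins, not Nova's verbatim code):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with the
        # sentinel vif_type 'binding_failed'; Nova converts that into the
        # exception recorded throughout this trace.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '5fd1f426-dc0b-4645-af65-d7b50ec60cfb',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message text as the ERROR entries above

The underlying failure is on the Neutron side (the port was never bound to a host), which is why the message points at the neutron logs rather than at nova-compute.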
[ 857.912789] env[61974]: ERROR nova.compute.manager [ 857.912789] env[61974]: Traceback (most recent call last): [ 857.912789] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 857.912789] env[61974]: listener.cb(fileno) [ 857.912789] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.912789] env[61974]: result = function(*args, **kwargs) [ 857.912789] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 857.912789] env[61974]: return func(*args, **kwargs) [ 857.912789] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.912789] env[61974]: raise e [ 857.912789] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.912789] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 857.912789] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 857.912789] env[61974]: created_port_ids = self._update_ports_for_instance( [ 857.912789] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 857.912789] env[61974]: with excutils.save_and_reraise_exception(): [ 857.912789] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.912789] env[61974]: self.force_reraise() [ 857.912789] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.912789] env[61974]: raise self.value [ 857.912789] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 857.912789] env[61974]: updated_port = self._update_port( [ 857.912789] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.912789] env[61974]: _ensure_no_port_binding_failure(port) [ 857.912789] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.912789] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 857.913477] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. [ 857.913477] env[61974]: Removing descriptor: 21 [ 857.913477] env[61974]: ERROR nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. 
[ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Traceback (most recent call last): [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] yield resources [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.driver.spawn(context, instance, image_meta, [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.913477] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] vm_ref = self.build_virtual_machine(instance, [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] for vif in network_info: [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self._sync_wrapper(fn, *args, **kwargs) [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.wait() [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self[:] = self._gt.wait() [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self._exit_event.wait() [ 857.913754] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 857.914092] env[61974]: ERROR 
nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] result = hub.switch() [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self.greenlet.switch() [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] result = function(*args, **kwargs) [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return func(*args, **kwargs) [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise e [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] nwinfo = self.network_api.allocate_for_instance( [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 857.914092] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] created_port_ids = self._update_ports_for_instance( [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] with excutils.save_and_reraise_exception(): [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.force_reraise() [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise self.value [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] updated_port = self._update_port( [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.914430] 
env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] _ensure_no_port_binding_failure(port) [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.914430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise exception.PortBindingFailed(port_id=port['id']) [ 857.914704] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. [ 857.914704] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] [ 857.914704] env[61974]: INFO nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Terminating instance [ 857.918627] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquiring lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.978255] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.183023] env[61974]: DEBUG nova.network.neutron [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.342700] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.373209] env[61974]: DEBUG nova.network.neutron [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.799665] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99759a20-e156-48ea-9089-e936d6e746d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.808100] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cba9ba-3e87-4dfc-b58e-b7b59edc8249 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.845462] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1add896a-e183-4df7-94f1-0aba72444f19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.852967] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Releasing lock "refresh_cache-0de509f4-48d8-43ae-9551-80ae414d7c8e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.853220] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 858.853400] env[61974]: DEBUG nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.853561] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.856240] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f74139-1bb3-4935-a1fc-6287367cf00c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.870547] env[61974]: DEBUG nova.compute.provider_tree [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.878894] env[61974]: DEBUG oslo_concurrency.lockutils [req-fb75f6d3-be65-44df-ae44-c72322138ced req-afd61c28-40c4-49e7-82d1-af6010792baa service nova] Releasing lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.879447] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquired lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.879755] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.885639] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.374307] env[61974]: DEBUG nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.390262] env[61974]: DEBUG nova.network.neutron [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.407745] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.604446] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.717823] env[61974]: DEBUG nova.compute.manager [req-b08c407d-c154-463c-a445-2dbd54d54350 req-b535bbf7-22db-4ce0-8503-c0aa90f15f0b service nova] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Received event network-vif-deleted-5fd1f426-dc0b-4645-af65-d7b50ec60cfb {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.881200] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.882028] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 859.885049] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.132s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.892507] env[61974]: INFO nova.compute.manager [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] [instance: 0de509f4-48d8-43ae-9551-80ae414d7c8e] Took 1.04 seconds to deallocate network for instance. [ 860.109429] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Releasing lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.109887] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 860.110146] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 860.110488] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3564088f-61d6-44db-8895-ee36afe62c3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.120923] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d018ca-ffd2-4c0e-b059-0b1d91d86738 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.147985] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4545e438-8784-4911-bf2e-8eb14d38c308 could not be found. [ 860.148538] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.148959] env[61974]: INFO nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Took 0.04 seconds to destroy the instance on the hypervisor. 
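Around every network-info rebuild in this trace the same oslo.concurrency pattern appears: a named lock "refresh_cache-<instance uuid>" is acquired before the cache is refreshed and released afterwards (the lockutils.py:310/313/331 lines). A small sketch of that pattern, assuming oslo.concurrency is installed; the function and callback names here are illustrative only:

    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid, rebuild_cache):
        # lockutils.lock() emits the "Acquiring lock" / "Acquired lock" /
        # "Releasing lock" DEBUG lines seen above when debug logging is on.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return rebuild_cache(instance_uuid)

Serialising on a per-instance lock is what keeps the concurrent actors in this log (the tempest-driven build, the service-triggered event handler, and the cleanup path) from rebuilding the same instance's cache at the same time.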
[ 860.149311] env[61974]: DEBUG oslo.service.loopingcall [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.149606] env[61974]: DEBUG nova.compute.manager [-] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 860.149742] env[61974]: DEBUG nova.network.neutron [-] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.170166] env[61974]: DEBUG nova.network.neutron [-] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 860.399303] env[61974]: DEBUG nova.compute.utils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.405667] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 860.405849] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.518509] env[61974]: DEBUG nova.policy [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1f9bf9b2b848c096b4aa88cf9035fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8153f21f085460db2d0328196e2f347', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.673103] env[61974]: DEBUG nova.network.neutron [-] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.911022] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 860.917702] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Successfully created port: a1a08ac6-cfac-4b8a-960d-1160b22d84fc {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.954418] env[61974]: INFO nova.scheduler.client.report [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Deleted allocations for instance 0de509f4-48d8-43ae-9551-80ae414d7c8e [ 861.179386] env[61974]: INFO nova.compute.manager [-] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Took 1.03 seconds to deallocate network for instance. [ 861.181811] env[61974]: DEBUG nova.compute.claims [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 861.181991] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.435763] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0de509f4-48d8-43ae-9551-80ae414d7c8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.436931] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 5f62ae7a-126f-42ce-9579-57ca02c871d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.436931] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4545e438-8784-4911-bf2e-8eb14d38c308 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.436931] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e9309651-2fcb-40ad-babb-950042fe68f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.468484] env[61974]: DEBUG oslo_concurrency.lockutils [None req-82c2065a-b863-48fd-9267-106525ac4652 tempest-SecurityGroupsTestJSON-1659507719 tempest-SecurityGroupsTestJSON-1659507719-project-member] Lock "0de509f4-48d8-43ae-9551-80ae414d7c8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.190s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.839147] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.839147] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.852137] env[61974]: DEBUG nova.compute.manager [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Received event network-changed-a1a08ac6-cfac-4b8a-960d-1160b22d84fc {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 861.852137] env[61974]: DEBUG nova.compute.manager [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Refreshing instance network info cache due to event network-changed-a1a08ac6-cfac-4b8a-960d-1160b22d84fc. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 861.852137] env[61974]: DEBUG oslo_concurrency.lockutils [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] Acquiring lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.852137] env[61974]: DEBUG oslo_concurrency.lockutils [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] Acquired lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.852137] env[61974]: DEBUG nova.network.neutron [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Refreshing network info cache for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 861.926930] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 861.942068] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b2d442b2-1927-481c-a232-8514444004a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.959559] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 861.959559] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.959559] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 861.959785] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.960574] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 861.960839] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 861.961131] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 861.961357] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 861.961628] env[61974]: DEBUG 
nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 861.962068] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 861.962068] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 861.963443] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb17d0cb-96ba-4d98-82f0-7ab47a489326 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.973534] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.981043] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9de1e68-6995-4e84-8c05-ea7f9d2552b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.126136] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. 
[ 862.126136] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.126136] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 862.126136] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 862.126136] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.126136] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.126136] env[61974]: ERROR nova.compute.manager raise self.value [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 862.126136] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 862.126136] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.126136] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 862.126592] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.126592] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 862.126592] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. 
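The excutils frames in the traceback above (save_and_reraise_exception, __exit__, force_reraise, raise self.value) are oslo.utils' cleanup-then-reraise idiom: the in-flight exception is captured, a cleanup block runs, and the original error is re-raised. An illustrative sketch, with hypothetical create_ports/delete_ports callables standing in for the real port handling:

    from oslo_utils import excutils

    def allocate_ports(create_ports, delete_ports):
        created = []
        try:
            created = create_ports()
            return created
        except Exception:
            # Run the cleanup, then re-raise the original exception on exit;
            # __exit__ -> force_reraise() -> raise self.value are the frames
            # recorded in the trace above.
            with excutils.save_and_reraise_exception():
                delete_ports(created)

This is why the PortBindingFailed raised deep inside _update_port survives the cleanup in _update_ports_for_instance and is what ultimately aborts the spawn.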
[ 862.126592] env[61974]: ERROR nova.compute.manager [ 862.126592] env[61974]: Traceback (most recent call last): [ 862.126592] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 862.126592] env[61974]: listener.cb(fileno) [ 862.126592] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 862.126592] env[61974]: result = function(*args, **kwargs) [ 862.126592] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 862.126592] env[61974]: return func(*args, **kwargs) [ 862.126592] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 862.126592] env[61974]: raise e [ 862.126592] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.126592] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 862.126592] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 862.126592] env[61974]: created_port_ids = self._update_ports_for_instance( [ 862.126592] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 862.126592] env[61974]: with excutils.save_and_reraise_exception(): [ 862.126592] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.126592] env[61974]: self.force_reraise() [ 862.126592] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.126592] env[61974]: raise self.value [ 862.126592] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 862.126592] env[61974]: updated_port = self._update_port( [ 862.126592] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.126592] env[61974]: _ensure_no_port_binding_failure(port) [ 862.126592] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.126592] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 862.127228] env[61974]: nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. [ 862.127228] env[61974]: Removing descriptor: 20 [ 862.127228] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. 
[ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Traceback (most recent call last): [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] yield resources [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.driver.spawn(context, instance, image_meta, [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 862.127228] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] vm_ref = self.build_virtual_machine(instance, [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] for vif in network_info: [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self._sync_wrapper(fn, *args, **kwargs) [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.wait() [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self[:] = self._gt.wait() [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self._exit_event.wait() [ 862.127495] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 862.127893] env[61974]: ERROR 
nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] result = hub.switch() [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self.greenlet.switch() [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] result = function(*args, **kwargs) [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return func(*args, **kwargs) [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise e [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] nwinfo = self.network_api.allocate_for_instance( [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 862.127893] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] created_port_ids = self._update_ports_for_instance( [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] with excutils.save_and_reraise_exception(): [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.force_reraise() [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise self.value [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] updated_port = self._update_port( [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.128233] 
env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] _ensure_no_port_binding_failure(port) [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.128233] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise exception.PortBindingFailed(port_id=port['id']) [ 862.128511] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. [ 862.128511] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] [ 862.128511] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Terminating instance [ 862.129237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.448510] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance db03b815-295a-4a66-9afd-a1f4ba97601f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.506784] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.538146] env[61974]: DEBUG nova.network.neutron [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.637947] env[61974]: DEBUG nova.network.neutron [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.951130] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 5013beda-7f34-44fe-9159-f04e0aca5bce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 863.141429] env[61974]: DEBUG oslo_concurrency.lockutils [req-f441980f-2620-433e-8c34-d63a601db95d req-6f982dc0-ffee-48fd-9f02-d899c2f4e4cd service nova] Releasing lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.142085] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.142387] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.453529] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 863.673142] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.763698] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.875006] env[61974]: DEBUG nova.compute.manager [req-a68efc99-e1fd-45f2-ad1d-5ccbfd26035b req-344d5e82-5d23-4c8d-b99f-b9c3db7a8caa service nova] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Received event network-vif-deleted-a1a08ac6-cfac-4b8a-960d-1160b22d84fc {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.957224] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 11d4f981-b167-4c81-9cd7-7e939606d400 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.265916] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.266402] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 864.266633] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.266970] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b5f405e-c511-4612-a67a-700f841bdcdc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.276457] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c42c9b-e324-422c-950d-e15d1c79f0c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.300237] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9309651-2fcb-40ad-babb-950042fe68f9 could not be found. [ 864.300499] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.300704] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 864.300963] env[61974]: DEBUG oslo.service.loopingcall [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 864.301218] env[61974]: DEBUG nova.compute.manager [-] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.301312] env[61974]: DEBUG nova.network.neutron [-] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.316553] env[61974]: DEBUG nova.network.neutron [-] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.459850] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0a62f878-43c1-4aaf-9054-798572b4faa7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.819984] env[61974]: DEBUG nova.network.neutron [-] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.963862] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 68794d97-95f7-4612-9f9f-e370afb3d852 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 865.322920] env[61974]: INFO nova.compute.manager [-] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Took 1.02 seconds to deallocate network for instance. [ 865.325888] env[61974]: DEBUG nova.compute.claims [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 865.326088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.466281] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a9edbd98-3e67-476b-934d-15d893a62d02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 865.970674] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 635f362a-582e-44bc-85d8-8a69943982b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.473510] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f88f0ef2-24f2-4eef-92a3-8de2ebb6944a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 866.975855] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance f0601d26-4e29-4946-bb52-50e2a2163535 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.479745] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1c1404fd-a954-4849-883b-7898a7e87e2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 867.982046] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.485205] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b1fa5433-8f26-48db-a19d-d1e11245fb44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.701515] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.701753] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.988202] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 7b338210-5be8-4838-b815-8f2c6cc19ccd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.491490] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0ce75511-290c-4fea-9657-dfdd8d9efc4b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.994572] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 097ad079-9712-4183-9135-b15ad3a65d6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.497532] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 45fda940-b7f0-410c-b31a-b5cd365c28fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.497847] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 870.498019] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 870.755939] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a7dfc2-284b-4c1a-8ba2-39250b7c821e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.763527] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cab2527-dc51-454d-a44e-23f4bbdbd296 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.792299] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c71f07-0335-4907-89f9-a36840c33bf2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.799059] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011951c9-26dd-4647-9ae9-c919cd9a9d58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.811759] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.315348] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.820643] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.820892] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.936s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.821245] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.615s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.822686] env[61974]: INFO nova.compute.claims [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.825180] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.825367] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Cleaning up deleted instances {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 872.331464] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] There are 4 instances to clean {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 872.331464] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59b1ad04-c949-4b07-af77-f84f842dd9ee] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 872.836400] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f6b76518-d691-4e4f-861a-624a1684e564] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 873.106765] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea57401-7c28-4804-a095-de8d0218f9ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.114641] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f807f0-ecb0-47d0-861e-7287fc6220d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.145619] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c66b8f4-9568-4743-a6fd-baf0d4d3d3e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.152942] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b3861e-f29d-4bcf-9544-21e78a7978ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.166112] env[61974]: DEBUG nova.compute.provider_tree [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.339866] env[61974]: DEBUG nova.compute.manager [None 
req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 93a736b5-5423-4378-8b0c-73a0c46414ca] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 873.669038] env[61974]: DEBUG nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 873.844883] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 41fccade-6e5f-4642-8889-2ce00dbff1c7] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 874.173507] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.173984] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 874.176641] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.165s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.178318] env[61974]: INFO nova.compute.claims [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.347482] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.347482] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Cleaning up deleted instances with incomplete migration {{(pid=61974) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 874.683052] env[61974]: DEBUG nova.compute.utils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.686082] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 874.686251] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.737227] env[61974]: DEBUG nova.policy [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1f9bf9b2b848c096b4aa88cf9035fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8153f21f085460db2d0328196e2f347', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 874.850623] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 875.001907] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Successfully created port: ad9c07ae-44e6-4765-9c75-e7304a144938 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.188958] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 875.468158] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5bd420-5679-43ba-8cbe-44d47ca4e76d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.476387] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a055ee21-b313-4fc6-a96e-ef5cc9ba545c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.510639] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c61e4a0-a5f2-4e59-a50d-6a199a7baaa2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.520018] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f54607-319e-43ad-88a5-08cdee4769a3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.533429] env[61974]: DEBUG nova.compute.provider_tree [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.751077] env[61974]: DEBUG nova.compute.manager [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Received event network-changed-ad9c07ae-44e6-4765-9c75-e7304a144938 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.751077] env[61974]: DEBUG nova.compute.manager [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Refreshing instance network info cache due to event network-changed-ad9c07ae-44e6-4765-9c75-e7304a144938. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 875.751077] env[61974]: DEBUG oslo_concurrency.lockutils [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] Acquiring lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.751077] env[61974]: DEBUG oslo_concurrency.lockutils [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] Acquired lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.751077] env[61974]: DEBUG nova.network.neutron [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Refreshing network info cache for port ad9c07ae-44e6-4765-9c75-e7304a144938 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.943895] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. [ 875.943895] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 875.943895] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 875.943895] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 875.943895] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 875.943895] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 875.943895] env[61974]: ERROR nova.compute.manager raise self.value [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 875.943895] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 875.943895] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 875.943895] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 875.944355] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 875.944355] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 875.944355] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. [ 875.944355] env[61974]: ERROR nova.compute.manager [ 875.944355] env[61974]: Traceback (most recent call last): [ 875.944355] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 875.944355] env[61974]: listener.cb(fileno) [ 875.944355] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 875.944355] env[61974]: result = function(*args, **kwargs) [ 875.944355] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 875.944355] env[61974]: return func(*args, **kwargs) [ 875.944355] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 875.944355] env[61974]: raise e [ 875.944355] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 875.944355] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 875.944355] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 875.944355] env[61974]: created_port_ids = self._update_ports_for_instance( [ 875.944355] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 875.944355] env[61974]: with excutils.save_and_reraise_exception(): [ 875.944355] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 875.944355] env[61974]: self.force_reraise() [ 875.944355] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 875.944355] env[61974]: raise self.value [ 875.944355] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 875.944355] env[61974]: updated_port = self._update_port( [ 875.944355] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 875.944355] env[61974]: _ensure_no_port_binding_failure(port) [ 875.944355] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 875.944355] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 875.945131] env[61974]: nova.exception.PortBindingFailed: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. 
[ 875.945131] env[61974]: Removing descriptor: 20 [ 876.037208] env[61974]: DEBUG nova.scheduler.client.report [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 876.201811] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 876.225962] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 876.226238] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.226396] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.226578] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.226726] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.226873] env[61974]: DEBUG nova.virt.hardware [None 
req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 876.227108] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 876.227270] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 876.227434] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 876.227593] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 876.227762] env[61974]: DEBUG nova.virt.hardware [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 876.228648] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5634b42f-fe8a-4220-979a-6c384fb4d83c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.236310] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4582b7-9fc6-4ef2-acbe-046938bd841b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.250010] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. 
[ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] Traceback (most recent call last): [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] yield resources [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.driver.spawn(context, instance, image_meta, [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] vm_ref = self.build_virtual_machine(instance, [ 876.250010] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] for vif in network_info: [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return self._sync_wrapper(fn, *args, **kwargs) [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.wait() [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self[:] = self._gt.wait() [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return self._exit_event.wait() [ 876.250351] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 876.250351] env[61974]: ERROR 
nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] current.throw(*self._exc) [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] result = function(*args, **kwargs) [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return func(*args, **kwargs) [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise e [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] nwinfo = self.network_api.allocate_for_instance( [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] created_port_ids = self._update_ports_for_instance( [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] with excutils.save_and_reraise_exception(): [ 876.250671] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.force_reraise() [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise self.value [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] updated_port = self._update_port( [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] _ensure_no_port_binding_failure(port) [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise exception.PortBindingFailed(port_id=port['id']) [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] nova.exception.PortBindingFailed: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. [ 876.251074] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] [ 876.251074] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Terminating instance [ 876.253058] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.268616] env[61974]: DEBUG nova.network.neutron [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.356750] env[61974]: DEBUG nova.network.neutron [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.542790] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.543115] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 876.545663] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.426s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.860348] env[61974]: DEBUG oslo_concurrency.lockutils [req-f2d126b2-2d6c-4212-b8d5-0d31946546d4 req-912814e9-354f-446e-918c-2f21464f075f service nova] Releasing lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.860801] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.860996] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.054549] env[61974]: DEBUG nova.compute.utils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.056213] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 877.056458] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 877.106693] env[61974]: DEBUG nova.policy [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.366152] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027bb4aa-30f6-4a51-855e-b1aee27730e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.374031] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97235d60-fd3b-4a4f-b146-66a31d944405 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.409990] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.414020] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba106b0e-6d7b-4568-b54d-2446e4aed7d2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.420584] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febb83eb-f3d9-430a-8596-3cd6a33e856f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.437252] env[61974]: DEBUG nova.compute.provider_tree [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.493120] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Successfully created port: e078359d-2401-4255-a671-abea7c89c9ed {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.559332] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 877.611274] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.806109] env[61974]: DEBUG nova.compute.manager [req-057fdce0-6cca-4657-aab0-9500aaee0235 req-569a6739-f8a6-441a-907b-687a3ceeb3c1 service nova] [instance: b2d442b2-1927-481c-a232-8514444004a7] Received event network-vif-deleted-ad9c07ae-44e6-4765-9c75-e7304a144938 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.940277] env[61974]: DEBUG nova.scheduler.client.report [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 878.113929] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.114371] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 878.114562] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.114868] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e70d227a-d2f4-4825-a607-beecdc4e99fb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.125756] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16192fd5-74f5-41a8-abfe-d542abace1b6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.153351] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b2d442b2-1927-481c-a232-8514444004a7 could not be found. [ 878.153573] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.153757] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 878.153999] env[61974]: DEBUG oslo.service.loopingcall [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.154265] env[61974]: DEBUG nova.compute.manager [-] [instance: b2d442b2-1927-481c-a232-8514444004a7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 878.154325] env[61974]: DEBUG nova.network.neutron [-] [instance: b2d442b2-1927-481c-a232-8514444004a7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.172903] env[61974]: DEBUG nova.network.neutron [-] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.445567] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.446207] env[61974]: ERROR nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Traceback (most recent call last): [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.driver.spawn(context, instance, image_meta, [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] vm_ref = self.build_virtual_machine(instance, [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] vif_infos = vmwarevif.get_vif_info(self._session, [ 878.446207] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] for vif in network_info: [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self._sync_wrapper(fn, *args, **kwargs) [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.wait() [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 878.446550] env[61974]: ERROR 
nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self[:] = self._gt.wait() [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self._exit_event.wait() [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] result = hub.switch() [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 878.446550] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return self.greenlet.switch() [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] result = function(*args, **kwargs) [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] return func(*args, **kwargs) [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise e [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] nwinfo = self.network_api.allocate_for_instance( [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] created_port_ids = self._update_ports_for_instance( [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] with excutils.save_and_reraise_exception(): [ 878.446868] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] self.force_reraise() [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise self.value [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] updated_port = self._update_port( [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] _ensure_no_port_binding_failure(port) [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] raise exception.PortBindingFailed(port_id=port['id']) [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] nova.exception.PortBindingFailed: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. [ 878.447161] env[61974]: ERROR nova.compute.manager [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] [ 878.447416] env[61974]: DEBUG nova.compute.utils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 878.448048] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.594s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.449541] env[61974]: INFO nova.compute.claims [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.452054] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Build of instance 5f62ae7a-126f-42ce-9579-57ca02c871d8 was re-scheduled: Binding failed for port f1ba04af-31ec-4ecc-8250-9b2f424023ce, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 878.452483] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 878.452707] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.452853] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.453029] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.569553] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 878.593687] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.593981] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.594156] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.594335] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.594516] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.594714] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.594942] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.595118] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.595288] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] 
Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.595457] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.595632] env[61974]: DEBUG nova.virt.hardware [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.596575] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9df4c0-55a4-4b65-8a3c-3d58b59f364d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.605612] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0c0b41-5949-495f-b5db-e6138a691e79 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.626196] env[61974]: ERROR nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 878.626196] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.626196] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.626196] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.626196] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.626196] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.626196] env[61974]: ERROR nova.compute.manager raise self.value [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.626196] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 878.626196] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.626196] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 878.626853] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 878.626853] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 878.626853] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 878.626853] env[61974]: ERROR nova.compute.manager [ 878.626853] env[61974]: Traceback (most recent call last): [ 878.626853] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 878.626853] env[61974]: listener.cb(fileno) [ 878.626853] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.626853] env[61974]: result = function(*args, **kwargs) [ 878.626853] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 878.626853] env[61974]: return func(*args, **kwargs) [ 878.626853] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.626853] env[61974]: raise e [ 878.626853] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.626853] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 878.626853] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.626853] env[61974]: created_port_ids = self._update_ports_for_instance( [ 878.626853] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.626853] env[61974]: with excutils.save_and_reraise_exception(): [ 878.626853] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.626853] env[61974]: self.force_reraise() [ 878.626853] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.626853] env[61974]: raise self.value [ 878.626853] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.626853] env[61974]: updated_port = self._update_port( [ 878.626853] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.626853] env[61974]: _ensure_no_port_binding_failure(port) [ 878.626853] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.626853] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 878.627656] env[61974]: nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 878.627656] env[61974]: Removing descriptor: 20 [ 878.627656] env[61974]: ERROR nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. 
[ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Traceback (most recent call last): [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] yield resources [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.driver.spawn(context, instance, image_meta, [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 878.627656] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] vm_ref = self.build_virtual_machine(instance, [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] vif_infos = vmwarevif.get_vif_info(self._session, [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] for vif in network_info: [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self._sync_wrapper(fn, *args, **kwargs) [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.wait() [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self[:] = self._gt.wait() [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self._exit_event.wait() [ 878.628044] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 878.628447] env[61974]: ERROR 
nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] result = hub.switch() [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self.greenlet.switch() [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] result = function(*args, **kwargs) [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return func(*args, **kwargs) [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise e [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] nwinfo = self.network_api.allocate_for_instance( [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 878.628447] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] created_port_ids = self._update_ports_for_instance( [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] with excutils.save_and_reraise_exception(): [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.force_reraise() [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise self.value [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] updated_port = self._update_port( [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.628758] 
env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] _ensure_no_port_binding_failure(port) [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.628758] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise exception.PortBindingFailed(port_id=port['id']) [ 878.629050] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 878.629050] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] [ 878.629050] env[61974]: INFO nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Terminating instance [ 878.629459] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.629655] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.629779] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.675894] env[61974]: DEBUG nova.network.neutron [-] [instance: b2d442b2-1927-481c-a232-8514444004a7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.972878] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.058274] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.147874] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.178314] env[61974]: INFO nova.compute.manager [-] [instance: b2d442b2-1927-481c-a232-8514444004a7] Took 1.02 seconds to deallocate network for instance. [ 879.180662] env[61974]: DEBUG nova.compute.claims [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 879.180840] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.227934] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.561088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-5f62ae7a-126f-42ce-9579-57ca02c871d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.561192] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 879.561335] env[61974]: DEBUG nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 879.561687] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.581172] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.730364] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.730802] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 879.730995] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.733486] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fdbb7b6e-c372-41e8-b022-0ff5138cb301 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.742415] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfda654-6f87-47ac-b4ac-f6555b7ad116 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.765868] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db03b815-295a-4a66-9afd-a1f4ba97601f could not be found. [ 879.766131] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.766373] env[61974]: INFO nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 879.766632] env[61974]: DEBUG oslo.service.loopingcall [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.768990] env[61974]: DEBUG nova.compute.manager [-] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 879.769106] env[61974]: DEBUG nova.network.neutron [-] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.789775] env[61974]: DEBUG nova.network.neutron [-] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.813864] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f02fa8-0b20-422b-9750-39145ea4c6c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.824029] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2358c7ef-c211-4097-855d-444ef84cc784 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.829259] env[61974]: DEBUG nova.compute.manager [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Received event network-changed-e078359d-2401-4255-a671-abea7c89c9ed {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.829547] env[61974]: DEBUG nova.compute.manager [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Refreshing instance network info cache due to event network-changed-e078359d-2401-4255-a671-abea7c89c9ed. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 879.830727] env[61974]: DEBUG oslo_concurrency.lockutils [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] Acquiring lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.830727] env[61974]: DEBUG oslo_concurrency.lockutils [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] Acquired lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.830727] env[61974]: DEBUG nova.network.neutron [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Refreshing network info cache for port e078359d-2401-4255-a671-abea7c89c9ed {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.859640] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c2204c-7516-4351-ab66-f71d4db55641 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.867789] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8376a5e4-98a8-456c-8e62-21669121bd81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.881761] env[61974]: DEBUG nova.compute.provider_tree [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.084387] env[61974]: DEBUG nova.network.neutron [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.294268] env[61974]: DEBUG nova.network.neutron [-] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.345999] env[61974]: DEBUG nova.network.neutron [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.388295] env[61974]: DEBUG nova.scheduler.client.report [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.412883] env[61974]: DEBUG nova.network.neutron [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.586944] env[61974]: INFO nova.compute.manager [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 5f62ae7a-126f-42ce-9579-57ca02c871d8] Took 1.03 seconds to deallocate network for instance. [ 880.798033] env[61974]: INFO nova.compute.manager [-] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Took 1.03 seconds to deallocate network for instance. [ 880.799361] env[61974]: DEBUG nova.compute.claims [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 880.799571] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.893476] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.893773] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.896784] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.875s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.898343] env[61974]: INFO nova.compute.claims [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.915950] env[61974]: DEBUG oslo_concurrency.lockutils [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] Releasing lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.915950] env[61974]: DEBUG nova.compute.manager [req-ac600cff-48ea-423b-b6cc-d95305e95d54 req-6a18c784-823e-407f-b985-2d990f3f10a5 service nova] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Received event network-vif-deleted-e078359d-2401-4255-a671-abea7c89c9ed {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.398544] env[61974]: DEBUG nova.compute.utils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.400336] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.400496] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.442988] env[61974]: DEBUG nova.policy [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09fb3fdafc6b4da7ae87e47badae20c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73bf71d8afdd4265865a3bd3fc0a8daa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.616235] env[61974]: INFO nova.scheduler.client.report [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted allocations for instance 5f62ae7a-126f-42ce-9579-57ca02c871d8 [ 881.724930] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Successfully created port: 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.903263] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 882.130323] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d1ece8bc-00cf-43d6-aaac-a137e3228974 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "5f62ae7a-126f-42ce-9579-57ca02c871d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.677s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.191591] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8736396d-8abd-4d78-a4af-4ab324bab7d7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.198718] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c377ae3-6d2f-4708-9707-36d9e0e0c853 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.229368] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0103810a-69e7-41af-9925-2d9be684754c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.236931] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89dce90-a031-4e91-bce4-fbe8e8d5d757 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.251160] env[61974]: DEBUG nova.compute.provider_tree [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.488343] env[61974]: DEBUG nova.compute.manager [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Received event network-changed-0c8a9cb4-ec58-4b0e-ac7f-8b476307722b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.488602] env[61974]: DEBUG nova.compute.manager [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Refreshing instance network info cache due to event network-changed-0c8a9cb4-ec58-4b0e-ac7f-8b476307722b. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 882.488964] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] Acquiring lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.489027] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] Acquired lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.489210] env[61974]: DEBUG nova.network.neutron [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Refreshing network info cache for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.633597] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 882.687775] env[61974]: ERROR nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. 
[ 882.687775] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 882.687775] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 882.687775] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 882.687775] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 882.687775] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 882.687775] env[61974]: ERROR nova.compute.manager raise self.value [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 882.687775] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 882.687775] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 882.687775] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 882.688276] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 882.688276] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 882.688276] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. 
[ 882.688276] env[61974]: ERROR nova.compute.manager [ 882.688276] env[61974]: Traceback (most recent call last): [ 882.688276] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 882.688276] env[61974]: listener.cb(fileno) [ 882.688276] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 882.688276] env[61974]: result = function(*args, **kwargs) [ 882.688276] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 882.688276] env[61974]: return func(*args, **kwargs) [ 882.688276] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 882.688276] env[61974]: raise e [ 882.688276] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 882.688276] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 882.688276] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 882.688276] env[61974]: created_port_ids = self._update_ports_for_instance( [ 882.688276] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 882.688276] env[61974]: with excutils.save_and_reraise_exception(): [ 882.688276] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 882.688276] env[61974]: self.force_reraise() [ 882.688276] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 882.688276] env[61974]: raise self.value [ 882.688276] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 882.688276] env[61974]: updated_port = self._update_port( [ 882.688276] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 882.688276] env[61974]: _ensure_no_port_binding_failure(port) [ 882.688276] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 882.688276] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 882.689145] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. [ 882.689145] env[61974]: Removing descriptor: 20 [ 882.755605] env[61974]: DEBUG nova.scheduler.client.report [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.914306] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.940627] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.940627] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.940627] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.940825] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.940825] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.940892] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.941108] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.941278] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.941438] env[61974]: DEBUG nova.virt.hardware [None 
req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.941595] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.941779] env[61974]: DEBUG nova.virt.hardware [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.943714] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475dc5d4-c082-475d-9ef6-353adc27f9ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.950802] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df73cda-f96e-4baa-a37d-7b9a28a37c57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.964507] env[61974]: ERROR nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. 
[ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Traceback (most recent call last): [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] yield resources [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.driver.spawn(context, instance, image_meta, [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] vm_ref = self.build_virtual_machine(instance, [ 882.964507] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] vif_infos = vmwarevif.get_vif_info(self._session, [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] for vif in network_info: [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return self._sync_wrapper(fn, *args, **kwargs) [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.wait() [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self[:] = self._gt.wait() [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return self._exit_event.wait() [ 882.964843] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 882.964843] env[61974]: ERROR 
nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] current.throw(*self._exc) [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] result = function(*args, **kwargs) [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return func(*args, **kwargs) [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise e [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] nwinfo = self.network_api.allocate_for_instance( [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] created_port_ids = self._update_ports_for_instance( [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] with excutils.save_and_reraise_exception(): [ 882.965176] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.force_reraise() [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise self.value [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] updated_port = self._update_port( [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] _ensure_no_port_binding_failure(port) [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise exception.PortBindingFailed(port_id=port['id']) [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. [ 882.965538] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] [ 882.965538] env[61974]: INFO nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Terminating instance [ 882.966785] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquiring lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.008036] env[61974]: DEBUG nova.network.neutron [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.138663] env[61974]: DEBUG nova.network.neutron [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.160352] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.260652] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.261287] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 883.264567] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.194s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.266256] env[61974]: INFO nova.compute.claims [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.643745] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d08b4ec-5901-4852-aeaf-b86b3a8ae9ba req-364bebee-b355-4d80-9f91-83155964a8e4 service nova] Releasing lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.644231] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquired lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.644481] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.771364] env[61974]: DEBUG nova.compute.utils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.775015] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 883.775015] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.824861] env[61974]: DEBUG nova.policy [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '572861a9a5534fd9a7c2d2f6ad5a7f71', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0f4c33b31d5487ea6befd2a492e6de2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 884.119870] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Successfully created port: 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.174157] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.280928] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 884.293488] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.293488] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.317049] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.515385] env[61974]: DEBUG nova.compute.manager [req-47bbb677-215e-4988-862f-9be36bebed3f req-b04c6f4c-b090-4cf3-a770-89d13d4d45bf service nova] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Received event network-vif-deleted-0c8a9cb4-ec58-4b0e-ac7f-8b476307722b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.543878] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677f8b49-fcf9-4711-bc41-72ded532e3c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.551568] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5358a4fe-75da-4f5f-8693-d80e348a057d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.580655] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31e29ee-1418-4568-b909-960ff71eaea7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.588130] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4878eb41-cbbb-4537-be99-e7c96afe3383 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.601413] env[61974]: DEBUG nova.compute.provider_tree [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.824761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Releasing lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.825236] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.825455] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.825775] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64176434-8faf-4ba1-b3ba-3cbe221cc2ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.835129] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf6d31b-addb-4c3d-8eb4-2bcedbf63870 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.857798] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5013beda-7f34-44fe-9159-f04e0aca5bce could not be found. [ 884.857991] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.858229] env[61974]: INFO nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Took 0.03 seconds to destroy the instance on the hypervisor. [ 884.858504] env[61974]: DEBUG oslo.service.loopingcall [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.858729] env[61974]: DEBUG nova.compute.manager [-] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.858822] env[61974]: DEBUG nova.network.neutron [-] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.888461] env[61974]: DEBUG nova.network.neutron [-] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.104179] env[61974]: DEBUG nova.scheduler.client.report [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.155203] env[61974]: ERROR nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. [ 885.155203] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.155203] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 885.155203] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 885.155203] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.155203] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.155203] env[61974]: ERROR nova.compute.manager raise self.value [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 885.155203] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 885.155203] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.155203] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 885.155689] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.155689] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 885.155689] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. 
[ 885.155689] env[61974]: ERROR nova.compute.manager [ 885.155689] env[61974]: Traceback (most recent call last): [ 885.155689] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 885.155689] env[61974]: listener.cb(fileno) [ 885.155689] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.155689] env[61974]: result = function(*args, **kwargs) [ 885.155689] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 885.155689] env[61974]: return func(*args, **kwargs) [ 885.155689] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.155689] env[61974]: raise e [ 885.155689] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.155689] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 885.155689] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 885.155689] env[61974]: created_port_ids = self._update_ports_for_instance( [ 885.155689] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 885.155689] env[61974]: with excutils.save_and_reraise_exception(): [ 885.155689] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.155689] env[61974]: self.force_reraise() [ 885.155689] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.155689] env[61974]: raise self.value [ 885.155689] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 885.155689] env[61974]: updated_port = self._update_port( [ 885.155689] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.155689] env[61974]: _ensure_no_port_binding_failure(port) [ 885.155689] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.155689] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 885.156781] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. [ 885.156781] env[61974]: Removing descriptor: 20 [ 885.297619] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 885.323354] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.323677] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.323835] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.324028] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.324182] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.324327] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.324527] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.324684] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.324847] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 
tempest-ServersTestJSON-1327942458-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.325016] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.325198] env[61974]: DEBUG nova.virt.hardware [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.326264] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f18d47-3373-4827-b5f6-32640519a8e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.334348] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa00ac6-10ec-4b05-9d19-2df222c51096 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.348354] env[61974]: ERROR nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. 
[ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Traceback (most recent call last): [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] yield resources [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.driver.spawn(context, instance, image_meta, [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] vm_ref = self.build_virtual_machine(instance, [ 885.348354] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] vif_infos = vmwarevif.get_vif_info(self._session, [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] for vif in network_info: [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return self._sync_wrapper(fn, *args, **kwargs) [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.wait() [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self[:] = self._gt.wait() [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return self._exit_event.wait() [ 885.348658] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 885.348658] env[61974]: ERROR 
nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] current.throw(*self._exc) [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] result = function(*args, **kwargs) [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return func(*args, **kwargs) [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise e [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] nwinfo = self.network_api.allocate_for_instance( [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] created_port_ids = self._update_ports_for_instance( [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] with excutils.save_and_reraise_exception(): [ 885.349064] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.force_reraise() [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise self.value [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] updated_port = self._update_port( [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] _ensure_no_port_binding_failure(port) [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise exception.PortBindingFailed(port_id=port['id']) [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. [ 885.349440] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] [ 885.349440] env[61974]: INFO nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Terminating instance [ 885.350810] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquiring lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.350967] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquired lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.351154] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.390640] env[61974]: DEBUG nova.network.neutron [-] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.609326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.609914] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 885.612895] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.736s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.614287] env[61974]: INFO nova.compute.claims [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.868284] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.893343] env[61974]: INFO nova.compute.manager [-] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Took 1.03 seconds to deallocate network for instance. [ 885.897961] env[61974]: DEBUG nova.compute.claims [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 885.898172] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.963260] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.119145] env[61974]: DEBUG nova.compute.utils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.122232] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 886.122391] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.169576] env[61974]: DEBUG nova.policy [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5938be9422cd407fbfe6e343a78baa06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '589633cfa9694483af282017afb972f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 886.456772] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Successfully created port: fd2c009c-6335-46c7-a75c-2537847bbe48 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.465568] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Releasing lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.466031] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 886.466245] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 886.466547] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f55f746-72d1-43c3-886f-ee850fe7022b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.475916] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07c3782-4c43-45d4-bf3a-fe31087fecdd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.501584] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e could not be found. 
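Annotation: the traceback for instance b62397bb ends in nova/network/neutron.py:_ensure_no_port_binding_failure (line 294 in this tree), and the "Binding failed for port ..., please check neutron logs for more information." text repeated throughout this log is that exception's message. A minimal sketch of the check and the exception is below; the binding:vif_type comparison is an assumption about how a failed Neutron binding is detected, and the class here subclasses plain Exception rather than nova.exception.NovaException, so treat it as illustrative only.

# Hedged sketch of the _ensure_no_port_binding_failure frame seen in the
# traceback above; consult nova/network/neutron.py for the real helper.
VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed marker for a failed binding

class PortBindingFailed(Exception):
    # Message format matches the text logged above.
    msg_fmt = ("Binding failed for port %(port_id)s, please check neutron "
               "logs for more information.")

    def __init__(self, port_id):
        super().__init__(self.msg_fmt % {'port_id': port_id})

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])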
[ 886.501820] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.502209] env[61974]: INFO nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 886.502278] env[61974]: DEBUG oslo.service.loopingcall [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.502493] env[61974]: DEBUG nova.compute.manager [-] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 886.502586] env[61974]: DEBUG nova.network.neutron [-] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.524752] env[61974]: DEBUG nova.network.neutron [-] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.541352] env[61974]: DEBUG nova.compute.manager [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Received event network-changed-53074fd5-f21e-4f14-b97c-4eb2f69b4fa6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.541531] env[61974]: DEBUG nova.compute.manager [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Refreshing instance network info cache due to event network-changed-53074fd5-f21e-4f14-b97c-4eb2f69b4fa6. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.541737] env[61974]: DEBUG oslo_concurrency.lockutils [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] Acquiring lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.541883] env[61974]: DEBUG oslo_concurrency.lockutils [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] Acquired lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.542053] env[61974]: DEBUG nova.network.neutron [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Refreshing network info cache for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.623498] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 886.985023] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74265122-cd32-4906-8367-7e7dfd4b9747 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.991519] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f735053-61d6-4d08-ba63-af1723d61f74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.022099] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb77d97-2094-4e30-a650-9cad64d9540c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.027548] env[61974]: DEBUG nova.network.neutron [-] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.031091] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdd09ec-85bc-4957-9471-25d17424fb6a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.035848] env[61974]: INFO nova.compute.manager [-] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Took 0.53 seconds to deallocate network for instance. 
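Annotation: the Acquiring/Acquired/Releasing DEBUG lines for lock "refresh_cache-b62397bb-..." come from oslo.concurrency's lockutils; both the terminating request and the network-changed event handler serialize on the same per-instance lock before touching the instance network info cache. A minimal sketch of that pattern, assuming a plain lockutils.lock context manager; the function and argument names are illustrative, not nova's call sites.

# Hedged sketch of the per-instance "refresh_cache-<uuid>" lock pattern
# visible above. refresh_network_cache / refresh_fn are illustrative names.
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, refresh_fn):
    # Produces Acquiring/Acquired/Releasing lock debug lines like those above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh_fn(instance_uuid)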
[ 887.043330] env[61974]: DEBUG nova.compute.claims [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 887.043508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.055673] env[61974]: DEBUG nova.compute.provider_tree [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.231688] env[61974]: DEBUG nova.network.neutron [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.354777] env[61974]: DEBUG nova.network.neutron [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.560191] env[61974]: DEBUG nova.scheduler.client.report [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.640022] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 887.667828] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 887.668162] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.669530] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 887.669530] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.669530] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 887.669530] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 887.669530] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 887.669750] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 887.669750] env[61974]: DEBUG nova.virt.hardware [None 
req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 887.669750] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 887.669750] env[61974]: DEBUG nova.virt.hardware [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 887.670581] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734a4c59-a2a8-4acb-814b-41b278bfb5ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.679024] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80cd9f9-5304-4424-ba0b-2621bd6cfeaa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.698227] env[61974]: ERROR nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. 
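Annotation: "Allocating IP information in the background" followed later by a PortBindingFailed traceback through nova/network/model.py (_sync_wrapper, wait) reflects the pattern of allocating ports on a separate eventlet greenthread and only surfacing the failure when the driver first iterates the network_info during spawn. A schematic of that deferred-failure pattern follows; AsyncNetworkInfo and its arguments are illustrative names, not nova classes.

# Hedged schematic of the model.py frames (_sync_wrapper -> wait ->
# greenthread.wait) in the tracebacks above.
import eventlet

class AsyncNetworkInfo(list):
    def __init__(self, async_method, *args, **kwargs):
        super().__init__()
        # Port allocation runs in the background while the build continues.
        self._gt = eventlet.spawn(async_method, *args, **kwargs)

    def wait(self):
        # Any exception raised in the greenthread (e.g. PortBindingFailed)
        # re-raises here, i.e. only when the network info is first consumed.
        self[:] = self._gt.wait()
        return self

    def __iter__(self):
        self.wait()
        return super().__iter__()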
[ 887.698227] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.698227] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 887.698227] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 887.698227] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.698227] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.698227] env[61974]: ERROR nova.compute.manager raise self.value [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 887.698227] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 887.698227] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.698227] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 887.698720] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.698720] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 887.698720] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. 
[ 887.698720] env[61974]: ERROR nova.compute.manager [ 887.698720] env[61974]: Traceback (most recent call last): [ 887.698720] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 887.698720] env[61974]: listener.cb(fileno) [ 887.698720] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 887.698720] env[61974]: result = function(*args, **kwargs) [ 887.698720] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 887.698720] env[61974]: return func(*args, **kwargs) [ 887.698720] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 887.698720] env[61974]: raise e [ 887.698720] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.698720] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 887.698720] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 887.698720] env[61974]: created_port_ids = self._update_ports_for_instance( [ 887.698720] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 887.698720] env[61974]: with excutils.save_and_reraise_exception(): [ 887.698720] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.698720] env[61974]: self.force_reraise() [ 887.698720] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.698720] env[61974]: raise self.value [ 887.698720] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 887.698720] env[61974]: updated_port = self._update_port( [ 887.698720] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.698720] env[61974]: _ensure_no_port_binding_failure(port) [ 887.698720] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.698720] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 887.699696] env[61974]: nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. [ 887.699696] env[61974]: Removing descriptor: 20 [ 887.699696] env[61974]: ERROR nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. 
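Annotation: the excutils.py frames above (save_and_reraise_exception.__exit__ -> force_reraise -> raise self.value) show the oslo.utils idiom used in _update_ports_for_instance to run cleanup on failure and then re-raise the original exception unchanged. A minimal sketch of that idiom; the cleanup callable is an illustrative placeholder, not the real cleanup performed by nova.

# Hedged sketch of the save_and_reraise_exception idiom from the
# _update_ports_for_instance frames above.
from oslo_utils import excutils

def update_ports(ports, update_port, delete_created_ports):
    created = []
    for port in ports:
        try:
            created.append(update_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on exiting the block, force_reraise()
                # re-raises the original exception (e.g. PortBindingFailed).
                delete_created_ports(created)
    return created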
[ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Traceback (most recent call last): [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] yield resources [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.driver.spawn(context, instance, image_meta, [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self._vmops.spawn(context, instance, image_meta, injected_files, [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 887.699696] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] vm_ref = self.build_virtual_machine(instance, [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] vif_infos = vmwarevif.get_vif_info(self._session, [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] for vif in network_info: [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self._sync_wrapper(fn, *args, **kwargs) [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.wait() [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self[:] = self._gt.wait() [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self._exit_event.wait() [ 887.700064] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 887.700445] env[61974]: ERROR 
nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] result = hub.switch() [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self.greenlet.switch() [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] result = function(*args, **kwargs) [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return func(*args, **kwargs) [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise e [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] nwinfo = self.network_api.allocate_for_instance( [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 887.700445] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] created_port_ids = self._update_ports_for_instance( [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] with excutils.save_and_reraise_exception(): [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.force_reraise() [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise self.value [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] updated_port = self._update_port( [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 887.700855] 
env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] _ensure_no_port_binding_failure(port) [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 887.700855] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise exception.PortBindingFailed(port_id=port['id']) [ 887.701202] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. [ 887.701202] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] [ 887.701202] env[61974]: INFO nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Terminating instance [ 887.702888] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquiring lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.703063] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquired lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.703495] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 887.857436] env[61974]: DEBUG oslo_concurrency.lockutils [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] Releasing lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.857724] env[61974]: DEBUG nova.compute.manager [req-be705fe9-e6c5-414a-bde5-4d5430db72b0 req-c1f7c5f7-b511-4506-a7bf-c855da6d4d6b service nova] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Received event network-vif-deleted-53074fd5-f21e-4f14-b97c-4eb2f69b4fa6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.071754] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.072301] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 
tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 888.075211] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.893s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.221821] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.325603] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.574294] env[61974]: DEBUG nova.compute.manager [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Received event network-changed-fd2c009c-6335-46c7-a75c-2537847bbe48 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.574819] env[61974]: DEBUG nova.compute.manager [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Refreshing instance network info cache due to event network-changed-fd2c009c-6335-46c7-a75c-2537847bbe48. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 888.575044] env[61974]: DEBUG oslo_concurrency.lockutils [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] Acquiring lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.578863] env[61974]: DEBUG nova.compute.utils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.582471] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 888.582636] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.631062] env[61974]: DEBUG nova.policy [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04c31ef1766849e08061996c57326110', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '208e3226aa9c4ca8842d86fdc303fb84', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 888.827903] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Releasing lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.828379] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 888.828575] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.828892] env[61974]: DEBUG oslo_concurrency.lockutils [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] Acquired lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.829112] env[61974]: DEBUG nova.network.neutron [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Refreshing network info cache for port fd2c009c-6335-46c7-a75c-2537847bbe48 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.830277] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b98f5055-47fa-41be-8ce8-cee3fcc77eeb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.842046] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1a5042-78c4-41d5-8b62-c8255cf512ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.863988] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80db56d2-9003-4fc6-b126-69b4973c4006 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.870782] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 11d4f981-b167-4c81-9cd7-7e939606d400 could not be found. [ 888.871685] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 888.871685] env[61974]: INFO nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Took 0.04 seconds to destroy the instance on the hypervisor. [ 888.871685] env[61974]: DEBUG oslo.service.loopingcall [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.871887] env[61974]: DEBUG nova.compute.manager [-] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 888.871980] env[61974]: DEBUG nova.network.neutron [-] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.876427] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071fbabe-f5da-4a8c-9be6-aaff91b4a0fd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.908698] env[61974]: DEBUG nova.network.neutron [-] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.910833] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dfcdc0-d515-4d41-a084-2c55b093b77f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.919684] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7006dbc5-3dcb-49b4-9dcc-8391f72b3898 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.923989] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Successfully created port: 385f568f-5c08-4f71-861a-98148d22e849 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 888.936531] env[61974]: DEBUG nova.compute.provider_tree [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.085996] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 889.353480] env[61974]: DEBUG nova.network.neutron [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.414290] env[61974]: DEBUG nova.network.neutron [-] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.441720] env[61974]: DEBUG nova.scheduler.client.report [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.485446] env[61974]: DEBUG nova.network.neutron [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.916844] env[61974]: INFO nova.compute.manager [-] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Took 1.04 seconds to deallocate network for instance. [ 889.919350] env[61974]: DEBUG nova.compute.claims [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 889.919529] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.946694] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.871s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.947430] env[61974]: ERROR nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. 
[ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Traceback (most recent call last): [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.driver.spawn(context, instance, image_meta, [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self._vmops.spawn(context, instance, image_meta, injected_files, [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] vm_ref = self.build_virtual_machine(instance, [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] vif_infos = vmwarevif.get_vif_info(self._session, [ 889.947430] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] for vif in network_info: [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self._sync_wrapper(fn, *args, **kwargs) [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.wait() [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self[:] = self._gt.wait() [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self._exit_event.wait() [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] result = hub.switch() [ 889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
889.947746] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return self.greenlet.switch() [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] result = function(*args, **kwargs) [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] return func(*args, **kwargs) [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise e [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] nwinfo = self.network_api.allocate_for_instance( [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] created_port_ids = self._update_ports_for_instance( [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] with excutils.save_and_reraise_exception(): [ 889.948243] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] self.force_reraise() [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise self.value [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] updated_port = self._update_port( [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] _ensure_no_port_binding_failure(port) [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] raise exception.PortBindingFailed(port_id=port['id']) [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] nova.exception.PortBindingFailed: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. [ 889.948595] env[61974]: ERROR nova.compute.manager [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] [ 889.948874] env[61974]: DEBUG nova.compute.utils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 889.949699] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.443s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.950961] env[61974]: INFO nova.compute.claims [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.954173] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Build of instance 4545e438-8784-4911-bf2e-8eb14d38c308 was re-scheduled: Binding failed for port 5fd1f426-dc0b-4645-af65-d7b50ec60cfb, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 889.954573] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 889.954812] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquiring lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.954970] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Acquired lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.955142] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.956723] env[61974]: ERROR nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. 
[ 889.956723] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.956723] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 889.956723] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 889.956723] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.956723] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.956723] env[61974]: ERROR nova.compute.manager raise self.value [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 889.956723] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 889.956723] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.956723] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 889.957399] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.957399] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 889.957399] env[61974]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. 
[ 889.957399] env[61974]: ERROR nova.compute.manager [ 889.957399] env[61974]: Traceback (most recent call last): [ 889.957399] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 889.957399] env[61974]: listener.cb(fileno) [ 889.957399] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 889.957399] env[61974]: result = function(*args, **kwargs) [ 889.957399] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 889.957399] env[61974]: return func(*args, **kwargs) [ 889.957399] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 889.957399] env[61974]: raise e [ 889.957399] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.957399] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 889.957399] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 889.957399] env[61974]: created_port_ids = self._update_ports_for_instance( [ 889.957399] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 889.957399] env[61974]: with excutils.save_and_reraise_exception(): [ 889.957399] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.957399] env[61974]: self.force_reraise() [ 889.957399] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.957399] env[61974]: raise self.value [ 889.957399] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 889.957399] env[61974]: updated_port = self._update_port( [ 889.957399] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.957399] env[61974]: _ensure_no_port_binding_failure(port) [ 889.957399] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.957399] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 889.958344] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. [ 889.958344] env[61974]: Removing descriptor: 20 [ 889.986887] env[61974]: DEBUG oslo_concurrency.lockutils [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] Releasing lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.986887] env[61974]: DEBUG nova.compute.manager [req-23ebf6c6-ffa2-4c2d-ad63-bde5c9171791 req-d987abbf-c385-4aff-97a9-3c5cc3292f22 service nova] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Received event network-vif-deleted-fd2c009c-6335-46c7-a75c-2537847bbe48 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.096743] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 890.211863] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.212144] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.212310] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.212493] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.212640] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.212787] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.212989] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.213178] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.213346] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.213583] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.213709] env[61974]: DEBUG nova.virt.hardware [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.214610] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc52199-8fd9-48ae-be94-99a34a9b1aa8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.222255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9971047a-f033-480e-ad89-4dd1acdc3767 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.235661] env[61974]: ERROR nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. 
[ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Traceback (most recent call last): [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] yield resources [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.driver.spawn(context, instance, image_meta, [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] vm_ref = self.build_virtual_machine(instance, [ 890.235661] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] vif_infos = vmwarevif.get_vif_info(self._session, [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] for vif in network_info: [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return self._sync_wrapper(fn, *args, **kwargs) [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.wait() [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self[:] = self._gt.wait() [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return self._exit_event.wait() [ 890.236053] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 890.236053] env[61974]: ERROR 
nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] current.throw(*self._exc) [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] result = function(*args, **kwargs) [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return func(*args, **kwargs) [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise e [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] nwinfo = self.network_api.allocate_for_instance( [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] created_port_ids = self._update_ports_for_instance( [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] with excutils.save_and_reraise_exception(): [ 890.236439] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.force_reraise() [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise self.value [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] updated_port = self._update_port( [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] _ensure_no_port_binding_failure(port) [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise exception.PortBindingFailed(port_id=port['id']) [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. [ 890.236995] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] [ 890.236995] env[61974]: INFO nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Terminating instance [ 890.238369] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquiring lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.238539] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquired lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.238710] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.478288] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.567559] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.598730] env[61974]: DEBUG nova.compute.manager [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Received event network-changed-385f568f-5c08-4f71-861a-98148d22e849 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.598961] env[61974]: DEBUG nova.compute.manager [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Refreshing instance network info cache due to event network-changed-385f568f-5c08-4f71-861a-98148d22e849. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 890.599170] env[61974]: DEBUG oslo_concurrency.lockutils [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] Acquiring lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.757037] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.846213] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.070344] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Releasing lock "refresh_cache-4545e438-8784-4911-bf2e-8eb14d38c308" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.070785] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 891.070785] env[61974]: DEBUG nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 891.071023] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.095602] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.212612] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456067e6-626c-4cb1-846f-4bb217aeaf7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.219795] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49f4393-a699-4c75-a9bf-4253c06b2ef8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.250995] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f7f77e-34a9-42bf-ab06-85474d981bb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.257768] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63074336-d225-408c-8aa4-52110141644a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.270487] env[61974]: DEBUG nova.compute.provider_tree [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.349248] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Releasing lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.349658] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 891.349852] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.350253] env[61974]: DEBUG oslo_concurrency.lockutils [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] Acquired lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.350453] env[61974]: DEBUG nova.network.neutron [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Refreshing network info cache for port 385f568f-5c08-4f71-861a-98148d22e849 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.351429] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd445698-e05a-4086-ad4a-fbf16c6f141b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.360858] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd7a302-1648-4fe2-87c4-d868b523f3f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.382320] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a62f878-43c1-4aaf-9054-798572b4faa7 could not be found. [ 891.382475] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.382661] env[61974]: INFO nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Took 0.03 seconds to destroy the instance on the hypervisor. [ 891.382903] env[61974]: DEBUG oslo.service.loopingcall [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.383176] env[61974]: DEBUG nova.compute.manager [-] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 891.383349] env[61974]: DEBUG nova.network.neutron [-] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.399455] env[61974]: DEBUG nova.network.neutron [-] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.598704] env[61974]: DEBUG nova.network.neutron [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.774150] env[61974]: DEBUG nova.scheduler.client.report [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.902331] env[61974]: DEBUG nova.network.neutron [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.904225] env[61974]: DEBUG nova.network.neutron [-] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.984113] env[61974]: DEBUG nova.network.neutron [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.102122] env[61974]: INFO nova.compute.manager [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] [instance: 4545e438-8784-4911-bf2e-8eb14d38c308] Took 1.03 seconds to deallocate network for instance. 
[ 892.278925] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.279450] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 892.281947] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.956s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.406516] env[61974]: INFO nova.compute.manager [-] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Took 1.02 seconds to deallocate network for instance. [ 892.408864] env[61974]: DEBUG nova.compute.claims [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 892.409083] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.486424] env[61974]: DEBUG oslo_concurrency.lockutils [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] Releasing lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.486677] env[61974]: DEBUG nova.compute.manager [req-1e29cede-06c5-4c22-95bf-d41edd8210ab req-35249a17-3e57-4f1e-8745-3dc828c1df4a service nova] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Received event network-vif-deleted-385f568f-5c08-4f71-861a-98148d22e849 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.787097] env[61974]: DEBUG nova.compute.utils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.791464] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 892.791638] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.831333] env[61974]: DEBUG nova.policy [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10148b3369de4608b6f73226b86dc02e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fcd4aebfaf0f494c98cde099c7e28363', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 893.050350] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba08388-d9f2-47b2-8ae7-93d8319462dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.057697] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644c22b3-2848-4013-a1e0-729888417820 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.087903] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3be9e87-97c5-45e7-afe8-e985e016e0b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.093615] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf2556c-912f-4355-bc43-24e472c25735 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.106304] env[61974]: DEBUG nova.compute.provider_tree [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.108245] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Successfully created port: 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.130377] env[61974]: INFO nova.scheduler.client.report [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Deleted allocations for instance 4545e438-8784-4911-bf2e-8eb14d38c308 [ 893.292639] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 
tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 893.613487] env[61974]: DEBUG nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.637699] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e9cadeb8-8031-4fb1-856d-1d3033e48734 tempest-InstanceActionsTestJSON-1507823780 tempest-InstanceActionsTestJSON-1507823780-project-member] Lock "4545e438-8784-4911-bf2e-8eb14d38c308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 155.115s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.856695] env[61974]: DEBUG nova.compute.manager [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Received event network-changed-494bb9c4-79d8-4ab3-b214-874dd1e3ceb5 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.856930] env[61974]: DEBUG nova.compute.manager [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Refreshing instance network info cache due to event network-changed-494bb9c4-79d8-4ab3-b214-874dd1e3ceb5.
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 893.857179] env[61974]: DEBUG oslo_concurrency.lockutils [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] Acquiring lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.857336] env[61974]: DEBUG oslo_concurrency.lockutils [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] Acquired lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.857497] env[61974]: DEBUG nova.network.neutron [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Refreshing network info cache for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.050538] env[61974]: ERROR nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. [ 894.050538] env[61974]: ERROR nova.compute.manager Traceback (most recent call last): [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.050538] env[61974]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 894.050538] env[61974]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 894.050538] env[61974]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.050538] env[61974]: ERROR nova.compute.manager self.force_reraise() [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.050538] env[61974]: ERROR nova.compute.manager raise self.value [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 894.050538] env[61974]: ERROR nova.compute.manager updated_port = self._update_port( [ 894.050538] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.050538] env[61974]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 894.050988] env[61974]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.050988] env[61974]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 894.050988] env[61974]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. [ 894.050988] env[61974]: ERROR nova.compute.manager [ 894.050988] env[61974]: Traceback (most recent call last): [ 894.050988] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 894.050988] env[61974]: listener.cb(fileno) [ 894.050988] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.050988] env[61974]: result = function(*args, **kwargs) [ 894.050988] env[61974]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 894.050988] env[61974]: return func(*args, **kwargs) [ 894.050988] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.050988] env[61974]: raise e [ 894.050988] env[61974]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.050988] env[61974]: nwinfo = self.network_api.allocate_for_instance( [ 894.050988] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 894.050988] env[61974]: created_port_ids = self._update_ports_for_instance( [ 894.050988] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 894.050988] env[61974]: with excutils.save_and_reraise_exception(): [ 894.050988] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.050988] env[61974]: self.force_reraise() [ 894.050988] env[61974]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.050988] env[61974]: raise self.value [ 894.050988] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 894.050988] env[61974]: updated_port = self._update_port( [ 894.050988] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.050988] env[61974]: _ensure_no_port_binding_failure(port) [ 894.050988] env[61974]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.050988] env[61974]: raise exception.PortBindingFailed(port_id=port['id']) [ 894.051652] env[61974]: nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. [ 894.051652] env[61974]: Removing descriptor: 20 [ 894.119058] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.837s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.119397] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. 
[ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Traceback (most recent call last): [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.driver.spawn(context, instance, image_meta, [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] vm_ref = self.build_virtual_machine(instance, [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 894.119397] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] for vif in network_info: [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self._sync_wrapper(fn, *args, **kwargs) [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.wait() [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self[:] = self._gt.wait() [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self._exit_event.wait() [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] result = hub.switch() [ 894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
894.119692] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return self.greenlet.switch() [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] result = function(*args, **kwargs) [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] return func(*args, **kwargs) [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise e [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] nwinfo = self.network_api.allocate_for_instance( [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] created_port_ids = self._update_ports_for_instance( [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] with excutils.save_and_reraise_exception(): [ 894.119992] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] self.force_reraise() [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise self.value [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] updated_port = self._update_port( [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] _ensure_no_port_binding_failure(port) [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] raise exception.PortBindingFailed(port_id=port['id']) [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] nova.exception.PortBindingFailed: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. [ 894.120341] env[61974]: ERROR nova.compute.manager [instance: e9309651-2fcb-40ad-babb-950042fe68f9] [ 894.120649] env[61974]: DEBUG nova.compute.utils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 894.121673] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.941s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.124584] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Build of instance e9309651-2fcb-40ad-babb-950042fe68f9 was re-scheduled: Binding failed for port a1a08ac6-cfac-4b8a-960d-1160b22d84fc, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 894.124990] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 894.125228] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.125375] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.125532] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.139865] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 894.302295] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 894.331779] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.332053] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.332218] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.332395] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.332544] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.332681] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.333016] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.333088] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 894.333237] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.333415] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.333588] env[61974]: DEBUG nova.virt.hardware [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.334454] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa4a7c8-7cab-4185-8675-50a28d671366 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.342898] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4eb37b-cbc1-4854-9d8a-824d43e204b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.356413] env[61974]: ERROR nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. 
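Note: the nova.virt.hardware DEBUG lines above walk the CPU topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, so the 65536 sockets/cores/threads defaults apply and the only possible topology is 1:1:1. The sketch below reproduces that enumeration under those assumptions; it mirrors the idea of _get_possible_cpu_topologies rather than Nova's exact implementation.

    # Illustrative enumeration of viable (sockets, cores, threads) splits
    # for a given vCPU count under per-dimension maximums.
    from itertools import product
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        # Keep every split whose product covers the requested vCPU count
        # and respects the per-dimension maximums.
        topologies = []
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                topologies.append(VirtCPUTopology(s, c, t))
        return topologies

    # For 1 vCPU this yields the single topology logged above:
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_cpu_topologies(1))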
[ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Traceback (most recent call last): [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] yield resources [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.driver.spawn(context, instance, image_meta, [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] vm_ref = self.build_virtual_machine(instance, [ 894.356413] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] vif_infos = vmwarevif.get_vif_info(self._session, [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] for vif in network_info: [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return self._sync_wrapper(fn, *args, **kwargs) [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.wait() [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self[:] = self._gt.wait() [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return self._exit_event.wait() [ 894.356718] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 894.356718] env[61974]: ERROR 
nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] current.throw(*self._exc) [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] result = function(*args, **kwargs) [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return func(*args, **kwargs) [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise e [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] nwinfo = self.network_api.allocate_for_instance( [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] created_port_ids = self._update_ports_for_instance( [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] with excutils.save_and_reraise_exception(): [ 894.357274] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.force_reraise() [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise self.value [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] updated_port = self._update_port( [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] _ensure_no_port_binding_failure(port) [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise exception.PortBindingFailed(port_id=port['id']) [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. [ 894.358023] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] [ 894.358023] env[61974]: INFO nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Terminating instance [ 894.358732] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.375135] env[61974]: DEBUG nova.network.neutron [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.456050] env[61974]: DEBUG nova.network.neutron [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.649465] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.662210] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.740803] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.930633] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14924bb-6e03-49a1-ab1e-2235de2a6551 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.939290] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ead922-e148-4ee5-952a-fed37ced04a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.969174] env[61974]: DEBUG oslo_concurrency.lockutils [req-d79bcafe-6551-4210-8e75-4dd923ab0f1f req-3239cfae-a2ee-4c0a-a60d-fe469ba91d71 service nova] Releasing lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.969686] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.969870] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.971332] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1db169-83cc-4246-8bc8-5365ed17fb3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.979231] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0b7aa6-7e21-4c44-9af3-0178ee72352c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.992802] env[61974]: DEBUG nova.compute.provider_tree [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.243303] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-e9309651-2fcb-40ad-babb-950042fe68f9" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.243566] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 895.243765] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 895.243969] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.259479] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.496279] env[61974]: DEBUG nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.503318] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.621731] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.762023] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.923688] env[61974]: DEBUG nova.compute.manager [req-58cf0e30-3c6b-44d3-918d-4442ea733dda req-a3dd009b-de9f-4198-b666-31f7102710d1 service nova] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Received event network-vif-deleted-494bb9c4-79d8-4ab3-b214-874dd1e3ceb5 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.006995] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.885s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.007637] env[61974]: ERROR nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. 
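Note: the lockutils messages around the resource tracker ("acquired ... waited Ns", "released ... held Ns") come from serializing every claim and abort on a single named lock, "compute_resources". A rough sketch of that pattern with oslo.concurrency is shown below; the function name and timing log lines are illustrative, not Nova's resource tracker code.

    # Illustrative use of oslo.concurrency's named lock to serialize
    # resource-tracker style updates (assumes oslo.concurrency is installed).
    import logging
    import time

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    def abort_instance_claim(instance_uuid):
        start = time.monotonic()
        # All claim/abort paths take the same named lock so usage totals
        # are never updated concurrently.
        with lockutils.lock('compute_resources'):
            LOG.debug('Lock "compute_resources" acquired :: waited %.3fs',
                      time.monotonic() - start)
            held_start = time.monotonic()
            # ... roll back the usage recorded for instance_uuid here ...
            LOG.debug('Lock "compute_resources" releasing :: held %.3fs',
                      time.monotonic() - held_start)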
[ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] Traceback (most recent call last): [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.driver.spawn(context, instance, image_meta, [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] vm_ref = self.build_virtual_machine(instance, [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 896.007637] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] for vif in network_info: [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return self._sync_wrapper(fn, *args, **kwargs) [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.wait() [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self[:] = self._gt.wait() [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return self._exit_event.wait() [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] current.throw(*self._exc) [ 896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
896.008018] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] result = function(*args, **kwargs) [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] return func(*args, **kwargs) [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise e [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] nwinfo = self.network_api.allocate_for_instance( [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] created_port_ids = self._update_ports_for_instance( [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] with excutils.save_and_reraise_exception(): [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] self.force_reraise() [ 896.008381] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise self.value [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] updated_port = self._update_port( [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] _ensure_no_port_binding_failure(port) [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] raise exception.PortBindingFailed(port_id=port['id']) [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] nova.exception.PortBindingFailed: Binding failed for 
port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. [ 896.008747] env[61974]: ERROR nova.compute.manager [instance: b2d442b2-1927-481c-a232-8514444004a7] [ 896.008747] env[61974]: DEBUG nova.compute.utils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 896.009952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.210s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.012747] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Build of instance b2d442b2-1927-481c-a232-8514444004a7 was re-scheduled: Binding failed for port ad9c07ae-44e6-4765-9c75-e7304a144938, please check neutron logs for more information. {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 896.013177] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 896.013401] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.013549] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.013706] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.124659] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.125133] env[61974]: DEBUG nova.compute.manager [None 
req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 896.125337] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.125644] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6629025a-4141-451c-b3fe-dbab04a03bfd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.134703] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5829c6-e79a-4110-afd3-e1679f5e26ce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.158439] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 68794d97-95f7-4612-9f9f-e370afb3d852 could not be found. [ 896.158683] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.158869] env[61974]: INFO nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Took 0.03 seconds to destroy the instance on the hypervisor. [ 896.159706] env[61974]: DEBUG oslo.service.loopingcall [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.159706] env[61974]: DEBUG nova.compute.manager [-] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.159706] env[61974]: DEBUG nova.network.neutron [-] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.175162] env[61974]: DEBUG nova.network.neutron [-] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.264026] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: e9309651-2fcb-40ad-babb-950042fe68f9] Took 1.02 seconds to deallocate network for instance. [ 896.537010] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.628133] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.677976] env[61974]: DEBUG nova.network.neutron [-] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.796768] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6279ca91-2441-4451-b245-86fbcd9b619a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.804874] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672cc7d3-6141-4b44-a857-689f764dd1ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.835781] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea41d6b-1ae4-4147-8fa2-47a8d1b394f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.843224] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c362a9-1543-48dc-9169-d355246ab1ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.857668] env[61974]: DEBUG nova.compute.provider_tree [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.130941] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-b2d442b2-1927-481c-a232-8514444004a7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.131195] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 897.131380] env[61974]: DEBUG nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 897.131546] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 897.153780] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.180928] env[61974]: INFO nova.compute.manager [-] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Took 1.02 seconds to deallocate network for instance. [ 897.183388] env[61974]: DEBUG nova.compute.claims [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Aborting claim: {{(pid=61974) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 897.183569] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.302490] env[61974]: INFO nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted allocations for instance e9309651-2fcb-40ad-babb-950042fe68f9 [ 897.361450] env[61974]: DEBUG nova.scheduler.client.report [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 897.658067] env[61974]: DEBUG nova.network.neutron [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.811790] env[61974]: 
DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "e9309651-2fcb-40ad-babb-950042fe68f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.152s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.869034] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.869034] env[61974]: ERROR nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Traceback (most recent call last): [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.driver.spawn(context, instance, image_meta, [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 897.869034] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] vm_ref = self.build_virtual_machine(instance, [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] vif_infos = vmwarevif.get_vif_info(self._session, [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] for vif in network_info: [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self._sync_wrapper(fn, *args, **kwargs) [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 605, in 
_sync_wrapper [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.wait() [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self[:] = self._gt.wait() [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self._exit_event.wait() [ 897.869341] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] result = hub.switch() [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return self.greenlet.switch() [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] result = function(*args, **kwargs) [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] return func(*args, **kwargs) [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise e [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] nwinfo = self.network_api.allocate_for_instance( [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 897.869687] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] created_port_ids = self._update_ports_for_instance( [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] with excutils.save_and_reraise_exception(): [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
227, in __exit__ [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] self.force_reraise() [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise self.value [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] updated_port = self._update_port( [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] _ensure_no_port_binding_failure(port) [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 897.869978] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] raise exception.PortBindingFailed(port_id=port['id']) [ 897.870273] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] nova.exception.PortBindingFailed: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. [ 897.870273] env[61974]: ERROR nova.compute.manager [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] [ 897.870273] env[61974]: DEBUG nova.compute.utils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 897.872882] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.711s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.873234] env[61974]: INFO nova.compute.claims [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.877478] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Build of instance db03b815-295a-4a66-9afd-a1f4ba97601f was re-scheduled: Binding failed for port e078359d-2401-4255-a671-abea7c89c9ed, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 897.878447] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 897.878812] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.879274] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.879832] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.163245] env[61974]: INFO nova.compute.manager [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: b2d442b2-1927-481c-a232-8514444004a7] Took 1.03 seconds to deallocate network for instance. [ 898.313146] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 898.397881] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.489941] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.839243] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.991358] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-db03b815-295a-4a66-9afd-a1f4ba97601f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.991594] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 898.991777] env[61974]: DEBUG nova.compute.manager [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 898.991946] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.008102] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.131142] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6929d63-2785-474f-bf6b-fa5bea7c65a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.139944] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853573df-fcb5-4581-a988-e0d8470f4356 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.172501] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641aa6eb-cc16-4476-90d3-66ef8d50d0ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.179737] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6fbe9a-b5e3-4cd3-a485-de1e08bfa915 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.192985] env[61974]: DEBUG nova.compute.provider_tree [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.194906] env[61974]: INFO nova.scheduler.client.report [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted allocations for instance b2d442b2-1927-481c-a232-8514444004a7 [ 899.510839] env[61974]: DEBUG nova.network.neutron [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.701834] env[61974]: DEBUG nova.scheduler.client.report [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.706066] env[61974]: DEBUG oslo_concurrency.lockutils [None req-17c5a06b-8b3f-453b-8464-b0358bd5abef tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "b2d442b2-1927-481c-a232-8514444004a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.024s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.013196] env[61974]: INFO nova.compute.manager [None 
req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: db03b815-295a-4a66-9afd-a1f4ba97601f] Took 1.02 seconds to deallocate network for instance. [ 900.206843] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.207460] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 900.210168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.312s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.213054] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 900.716831] env[61974]: DEBUG nova.compute.utils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 900.721723] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 900.721723] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 900.738530] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.784459] env[61974]: DEBUG nova.policy [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10148b3369de4608b6f73226b86dc02e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fcd4aebfaf0f494c98cde099c7e28363', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 900.969744] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93415dd-a0cb-4fae-a778-2b7807bbd8ab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.977530] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005336ea-706b-4899-b4d0-24271952b4ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.007041] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdd5264-a500-42af-af50-d7022555396d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.015683] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9b9551-efa8-4219-8e20-f18712516a44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.030325] env[61974]: DEBUG nova.compute.provider_tree [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.050192] env[61974]: INFO nova.scheduler.client.report [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance db03b815-295a-4a66-9afd-a1f4ba97601f [ 901.223551] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 
tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 901.277662] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Successfully created port: b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.533703] env[61974]: DEBUG nova.scheduler.client.report [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 901.558564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-000f6dcc-92b3-4b4f-9ebc-2851463aca1d tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "db03b815-295a-4a66-9afd-a1f4ba97601f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 149.285s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.851187] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.851423] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.877335] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.877538] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.040076] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.830s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.040723] env[61974]: ERROR nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Traceback (most recent call last): [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.driver.spawn(context, instance, image_meta, [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] vm_ref = self.build_virtual_machine(instance, [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] vif_infos = vmwarevif.get_vif_info(self._session, [ 902.040723] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] for vif in network_info: [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return self._sync_wrapper(fn, *args, **kwargs) [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.wait() [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 
5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self[:] = self._gt.wait() [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return self._exit_event.wait() [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] current.throw(*self._exc) [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 902.041100] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] result = function(*args, **kwargs) [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] return func(*args, **kwargs) [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise e [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] nwinfo = self.network_api.allocate_for_instance( [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] created_port_ids = self._update_ports_for_instance( [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] with excutils.save_and_reraise_exception(): [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] self.force_reraise() [ 902.041451] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise self.value [ 902.041807] env[61974]: ERROR nova.compute.manager 
[instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] updated_port = self._update_port( [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] _ensure_no_port_binding_failure(port) [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] raise exception.PortBindingFailed(port_id=port['id']) [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] nova.exception.PortBindingFailed: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. [ 902.041807] env[61974]: ERROR nova.compute.manager [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] [ 902.041807] env[61974]: DEBUG nova.compute.utils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 902.042683] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.999s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.046262] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Build of instance 5013beda-7f34-44fe-9159-f04e0aca5bce was re-scheduled: Binding failed for port 0c8a9cb4-ec58-4b0e-ac7f-8b476307722b, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 902.046696] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 902.047751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquiring lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.047751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Acquired lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.047751] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.060560] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 902.233936] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 902.261035] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.261280] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.261490] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.261831] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.261933] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.261999] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.262286] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.262556] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 902.262778] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.262980] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.263205] env[61974]: DEBUG nova.virt.hardware [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.264150] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8083daa9-c5b9-4825-9966-17a0e4e1537f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.272044] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea644d8-24c0-4f89-985b-124fc81d8aa5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.570130] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.577896] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.752170] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.825273] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f6a518-61fc-4bca-bd0b-1a9f58f4d974 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.834576] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd166491-826c-4957-a8da-6067f29b257a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.868449] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedab688-0f3b-46e3-b724-47bf69626ca0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.877399] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a39b08e-6934-453d-b43b-a0e8705d528c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.893201] env[61974]: DEBUG nova.compute.provider_tree [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.956232] env[61974]: DEBUG nova.compute.manager [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Received event network-vif-plugged-b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.957768] env[61974]: DEBUG oslo_concurrency.lockutils [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] Acquiring lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.957768] env[61974]: DEBUG oslo_concurrency.lockutils [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] Lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.957768] env[61974]: DEBUG 
oslo_concurrency.lockutils [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] Lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.957768] env[61974]: DEBUG nova.compute.manager [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] No waiting events found dispatching network-vif-plugged-b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 902.957768] env[61974]: WARNING nova.compute.manager [req-9cc5156f-f6fb-421f-9679-ca6a86a4f489 req-6a3e0a36-7647-4144-b73f-0176de769485 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Received unexpected event network-vif-plugged-b3427315-9e46-4452-9a4b-43147b6cbc63 for instance with vm_state building and task_state spawning. [ 903.009702] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Successfully updated port: b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.254531] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Releasing lock "refresh_cache-5013beda-7f34-44fe-9159-f04e0aca5bce" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.254879] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 903.254916] env[61974]: DEBUG nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 903.255110] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.271139] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.395612] env[61974]: DEBUG nova.scheduler.client.report [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.454651] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "c06a7599-58e8-4796-9e95-d96327f649d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.455187] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.512700] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.512850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.513014] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.773447] env[61974]: DEBUG nova.network.neutron [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.900894] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.858s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.901557] env[61974]: ERROR nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Traceback (most recent call last): [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.driver.spawn(context, instance, image_meta, [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] vm_ref = self.build_virtual_machine(instance, [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] vif_infos = vmwarevif.get_vif_info(self._session, [ 903.901557] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] for vif in network_info: [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return self._sync_wrapper(fn, *args, **kwargs) [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.wait() [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self[:] = self._gt.wait() [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 903.901914] env[61974]: ERROR nova.compute.manager 
[instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return self._exit_event.wait() [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] current.throw(*self._exc) [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 903.901914] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] result = function(*args, **kwargs) [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] return func(*args, **kwargs) [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise e [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] nwinfo = self.network_api.allocate_for_instance( [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] created_port_ids = self._update_ports_for_instance( [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] with excutils.save_and_reraise_exception(): [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] self.force_reraise() [ 903.902246] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise self.value [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] updated_port = self._update_port( [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 903.902556] env[61974]: ERROR 
nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] _ensure_no_port_binding_failure(port) [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] raise exception.PortBindingFailed(port_id=port['id']) [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] nova.exception.PortBindingFailed: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. [ 903.902556] env[61974]: ERROR nova.compute.manager [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] [ 903.902556] env[61974]: DEBUG nova.compute.utils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 903.903637] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.984s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.907373] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Build of instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e was re-scheduled: Binding failed for port 53074fd5-f21e-4f14-b97c-4eb2f69b4fa6, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 903.907813] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 903.908372] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquiring lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.908372] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Acquired lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.908478] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.044267] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.227511] env[61974]: DEBUG nova.network.neutron [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updating instance_info_cache with network_info: [{"id": "b3427315-9e46-4452-9a4b-43147b6cbc63", "address": "fa:16:3e:8c:21:6a", "network": {"id": "c7e214e9-6a9e-4dac-bc27-2bcd615ff93c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1569316101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcd4aebfaf0f494c98cde099c7e28363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3427315-9e", "ovs_interfaceid": "b3427315-9e46-4452-9a4b-43147b6cbc63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.277736] env[61974]: INFO nova.compute.manager [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] [instance: 5013beda-7f34-44fe-9159-f04e0aca5bce] Took 1.02 seconds to deallocate network for instance. [ 904.427361] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.522770] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.666771] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28ba8cc-dd4c-4a2b-a519-0e1aa22bfd44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.674595] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e163b8-9beb-4272-8d4b-3d1f93b7b4e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.703364] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b658ce-4a27-4dc4-83c9-ce429d547718 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.710699] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cc2267-191f-4de4-b49c-69225af62aab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.723332] env[61974]: DEBUG nova.compute.provider_tree [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.730376] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.730656] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Instance network_info: |[{"id": "b3427315-9e46-4452-9a4b-43147b6cbc63", "address": "fa:16:3e:8c:21:6a", "network": {"id": "c7e214e9-6a9e-4dac-bc27-2bcd615ff93c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1569316101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcd4aebfaf0f494c98cde099c7e28363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapb3427315-9e", "ovs_interfaceid": "b3427315-9e46-4452-9a4b-43147b6cbc63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 904.731092] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:21:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '767a3a48-41d4-4a0c-961d-0024837f63bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3427315-9e46-4452-9a4b-43147b6cbc63', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 904.738604] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Creating folder: Project (fcd4aebfaf0f494c98cde099c7e28363). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.739421] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-575420f1-972e-45d2-838f-bff280d0cd20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.751378] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Created folder: Project (fcd4aebfaf0f494c98cde099c7e28363) in parent group-v292912. [ 904.751558] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Creating folder: Instances. Parent ref: group-v292936. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 904.751807] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85f66766-ac2f-49b9-8fcb-08747b118022 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.761217] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Created folder: Instances in parent group-v292936. [ 904.761460] env[61974]: DEBUG oslo.service.loopingcall [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.761641] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 904.761829] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2e94f94-3f6d-4947-9e35-5bd4b4dedc7c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.784351] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 904.784351] env[61974]: value = "task-1378987" [ 904.784351] env[61974]: _type = "Task" [ 904.784351] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.791864] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378987, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.980292] env[61974]: DEBUG nova.compute.manager [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Received event network-changed-b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.980644] env[61974]: DEBUG nova.compute.manager [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Refreshing instance network info cache due to event network-changed-b3427315-9e46-4452-9a4b-43147b6cbc63. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.981034] env[61974]: DEBUG oslo_concurrency.lockutils [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] Acquiring lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.981321] env[61974]: DEBUG oslo_concurrency.lockutils [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] Acquired lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.981611] env[61974]: DEBUG nova.network.neutron [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Refreshing network info cache for port b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.028338] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Releasing lock "refresh_cache-b62397bb-95b4-4d07-819a-bfcfd7c6a38e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.028700] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 905.028904] env[61974]: DEBUG nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 905.030048] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 905.044706] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.226110] env[61974]: DEBUG nova.scheduler.client.report [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.297060] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378987, 'name': CreateVM_Task, 'duration_secs': 0.291983} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.297373] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.303815] env[61974]: INFO nova.scheduler.client.report [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Deleted allocations for instance 5013beda-7f34-44fe-9159-f04e0aca5bce [ 905.309945] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.310124] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.310471] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 905.310704] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-732abf13-e3c7-4cfc-9d37-799b2643b156 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.316251] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 905.316251] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f60f33-1420-c251-f674-6319ce2bd1a9" [ 905.316251] env[61974]: _type = "Task" [ 905.316251] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.324022] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f60f33-1420-c251-f674-6319ce2bd1a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.547015] env[61974]: DEBUG nova.network.neutron [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.709150] env[61974]: DEBUG nova.network.neutron [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updated VIF entry in instance network info cache for port b3427315-9e46-4452-9a4b-43147b6cbc63. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.709533] env[61974]: DEBUG nova.network.neutron [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updating instance_info_cache with network_info: [{"id": "b3427315-9e46-4452-9a4b-43147b6cbc63", "address": "fa:16:3e:8c:21:6a", "network": {"id": "c7e214e9-6a9e-4dac-bc27-2bcd615ff93c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1569316101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcd4aebfaf0f494c98cde099c7e28363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3427315-9e", "ovs_interfaceid": "b3427315-9e46-4452-9a4b-43147b6cbc63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.730830] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.827s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.731478] env[61974]: ERROR nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. 
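
The PortBindingFailed traceback that follows (like the earlier one for instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e) bottoms out in nova/network/neutron.py, where _update_port inspects the port returned by Neutron and _ensure_no_port_binding_failure raises the exception at line 294. A minimal sketch of that check, assuming the failure is signalled by the port's binding:vif_type attribute (the traceback only confirms the raise itself), is:

    # Sketch of the check the tracebacks point at
    # (nova/network/neutron.py, _ensure_no_port_binding_failure).
    # The 'binding:vif_type' comparison is an illustrative assumption;
    # the raised exception matches the traceback above.
    from nova import exception

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == 'binding_failed':
            # Neutron accepted the port but could not bind it to a host;
            # Nova turns that into the "Binding failed for port ..." errors.
            raise exception.PortBindingFailed(port_id=port['id'])

When this exception reaches _do_build_and_run_instance, the claim is aborted and the build is re-scheduled, which is the sequence the surrounding records show: abort_instance_claim on the compute_resources lock, "was re-scheduled: Binding failed for port ...", and the subsequent network deallocation.
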
[ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Traceback (most recent call last): [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.driver.spawn(context, instance, image_meta, [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self._vmops.spawn(context, instance, image_meta, injected_files, [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] vm_ref = self.build_virtual_machine(instance, [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] vif_infos = vmwarevif.get_vif_info(self._session, [ 905.731478] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] for vif in network_info: [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self._sync_wrapper(fn, *args, **kwargs) [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.wait() [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self[:] = self._gt.wait() [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self._exit_event.wait() [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] result = hub.switch() [ 905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
905.731782] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return self.greenlet.switch() [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] result = function(*args, **kwargs) [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] return func(*args, **kwargs) [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise e [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] nwinfo = self.network_api.allocate_for_instance( [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] created_port_ids = self._update_ports_for_instance( [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] with excutils.save_and_reraise_exception(): [ 905.733949] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] self.force_reraise() [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise self.value [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] updated_port = self._update_port( [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] _ensure_no_port_binding_failure(port) [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] raise exception.PortBindingFailed(port_id=port['id']) [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] nova.exception.PortBindingFailed: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. [ 905.734285] env[61974]: ERROR nova.compute.manager [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] [ 905.734568] env[61974]: DEBUG nova.compute.utils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 905.734568] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.324s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.738132] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Build of instance 11d4f981-b167-4c81-9cd7-7e939606d400 was re-scheduled: Binding failed for port fd2c009c-6335-46c7-a75c-2537847bbe48, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 905.738132] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 905.738132] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquiring lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.738132] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Acquired lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.738298] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.814508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f70feacd-ed07-4619-b752-021749b65518 tempest-ServerRescueTestJSON-1691265003 tempest-ServerRescueTestJSON-1691265003-project-member] Lock "5013beda-7f34-44fe-9159-f04e0aca5bce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 146.294s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.827019] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f60f33-1420-c251-f674-6319ce2bd1a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009441} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.827900] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.828218] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.828454] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.828601] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.828778] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.829282] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd3a5fba-ff23-4273-84b5-63307ab0de29 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.837524] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.837699] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.838420] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdddc5bf-d719-4bf0-9199-4c90e4816d32 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.843432] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 905.843432] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5246acce-7523-c9be-9300-c341a7fb4a71" [ 905.843432] env[61974]: _type = "Task" [ 905.843432] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.852954] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246acce-7523-c9be-9300-c341a7fb4a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.049703] env[61974]: INFO nova.compute.manager [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] [instance: b62397bb-95b4-4d07-819a-bfcfd7c6a38e] Took 1.02 seconds to deallocate network for instance. [ 906.212602] env[61974]: DEBUG oslo_concurrency.lockutils [req-a87b46a8-c07d-41d8-826e-44b9ab2c1d62 req-462e14ee-7307-4d51-86a3-474340e49054 service nova] Releasing lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.268931] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.317476] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 906.355758] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246acce-7523-c9be-9300-c341a7fb4a71, 'name': SearchDatastore_Task, 'duration_secs': 0.028253} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.357631] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae2b259d-d41c-46eb-b362-dd5f564410ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.367541] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 906.367541] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52366f35-0275-f29f-9a27-4334fe368a08" [ 906.367541] env[61974]: _type = "Task" [ 906.367541] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.377524] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52366f35-0275-f29f-9a27-4334fe368a08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.379025] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.501432] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9b01bd-93c6-40d3-8f7c-803309123081 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.507164] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83113815-5386-4dca-819a-d6300734357f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.538695] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e890ae-5a52-4a17-ac22-249137e6f5b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.546305] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f921cc8c-6b19-44fb-94e7-ab82470243c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.563788] env[61974]: DEBUG nova.compute.provider_tree [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.846918] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.880226] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52366f35-0275-f29f-9a27-4334fe368a08, 'name': SearchDatastore_Task, 'duration_secs': 0.011716} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.881159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Releasing lock "refresh_cache-11d4f981-b167-4c81-9cd7-7e939606d400" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.881410] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 906.881597] env[61974]: DEBUG nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.881782] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.883577] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.883833] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/a9edbd98-3e67-476b-934d-15d893a62d02.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.884124] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af3127e4-b974-4239-9a27-60dca95b1329 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.891045] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 
tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 906.891045] env[61974]: value = "task-1378988" [ 906.891045] env[61974]: _type = "Task" [ 906.891045] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.898533] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.907805] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.070280] env[61974]: DEBUG nova.scheduler.client.report [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.099307] env[61974]: INFO nova.scheduler.client.report [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Deleted allocations for instance b62397bb-95b4-4d07-819a-bfcfd7c6a38e [ 907.402005] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485509} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.402271] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/a9edbd98-3e67-476b-934d-15d893a62d02.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.402479] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.402729] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4dd36b9-ac31-4533-ad42-ea6d028b4773 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.409400] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 907.409400] env[61974]: value = "task-1378989" [ 907.409400] env[61974]: _type = "Task" [ 907.409400] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.409797] env[61974]: DEBUG nova.network.neutron [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.419527] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378989, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.575255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.842s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.575888] env[61974]: ERROR nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. 
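
The traceback that follows repeats the same port-binding failure path sketched earlier, this time for port 385f568f-5c08-4f71-861a-98148d22e849. Interleaved with those failures, the oslo_vmware.api records above (task-1378987 CreateVM_Task, task-1378988 CopyVirtualDisk_Task, task-1378989 ExtendVirtualDisk_Task) all follow the same wait_for_task pattern: submit the vCenter task, then poll it until it reports success or error, logging "progress is N%" along the way. A stand-alone sketch of that polling loop, using illustrative names (get_task_info, poll_interval) rather than the library's real signatures, looks like:

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        'session.get_task_info' is a hypothetical helper standing in for the
        property-collector call the real driver makes; the loop structure is
        what the "progress is 0%" / "completed successfully" records reflect.
        """
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # Still queued or running: report progress and try again.
            time.sleep(poll_interval)

In the records above the tasks finish quickly (duration_secs between roughly 0.01 and 0.49), so each poll loop exits after only a few iterations.
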
[ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Traceback (most recent call last): [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.driver.spawn(context, instance, image_meta, [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] vm_ref = self.build_virtual_machine(instance, [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] vif_infos = vmwarevif.get_vif_info(self._session, [ 907.575888] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] for vif in network_info: [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return self._sync_wrapper(fn, *args, **kwargs) [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.wait() [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self[:] = self._gt.wait() [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return self._exit_event.wait() [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] current.throw(*self._exc) [ 907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
907.576252] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] result = function(*args, **kwargs) [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] return func(*args, **kwargs) [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise e [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] nwinfo = self.network_api.allocate_for_instance( [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] created_port_ids = self._update_ports_for_instance( [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] with excutils.save_and_reraise_exception(): [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] self.force_reraise() [ 907.576788] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise self.value [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] updated_port = self._update_port( [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] _ensure_no_port_binding_failure(port) [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] raise exception.PortBindingFailed(port_id=port['id']) [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] nova.exception.PortBindingFailed: Binding failed for 
port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. [ 907.577189] env[61974]: ERROR nova.compute.manager [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] [ 907.577189] env[61974]: DEBUG nova.compute.utils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 907.577897] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.916s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.580033] env[61974]: INFO nova.compute.claims [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.582663] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Build of instance 0a62f878-43c1-4aaf-9054-798572b4faa7 was re-scheduled: Binding failed for port 385f568f-5c08-4f71-861a-98148d22e849, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 907.583232] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 907.583481] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquiring lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.583629] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Acquired lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.583789] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.611178] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0ad61ad4-c9bb-46e8-af9f-3bf20444ee1c tempest-ServersTestJSON-1327942458 tempest-ServersTestJSON-1327942458-project-member] Lock "b62397bb-95b4-4d07-819a-bfcfd7c6a38e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.033s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.915150] env[61974]: INFO nova.compute.manager [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] [instance: 11d4f981-b167-4c81-9cd7-7e939606d400] Took 1.03 seconds to deallocate network for instance. [ 907.926496] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378989, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066181} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.926744] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.927539] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23374e4-edec-4f70-b710-6fdf981f9c3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.950273] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/a9edbd98-3e67-476b-934d-15d893a62d02.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.950722] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6b9cfe3-8134-4049-8f0e-a245f919b907 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.971559] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 907.971559] env[61974]: value = "task-1378990" [ 907.971559] env[61974]: _type = "Task" [ 907.971559] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.981981] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378990, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.109207] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.113810] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 908.196757] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.483077] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378990, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.640856] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.698981] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Releasing lock "refresh_cache-0a62f878-43c1-4aaf-9054-798572b4faa7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.699266] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 908.699463] env[61974]: DEBUG nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 908.699634] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.715107] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.873966] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b89589a-99af-4f70-a40e-3f9c93490381 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.887758] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd30d1b-c657-489b-b3ae-4f418dae69e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.916982] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a2a856-d832-415a-874f-e8cfcc16949d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.924252] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2751a1ba-b7ae-4291-b0dc-832020d5d96f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.940951] env[61974]: DEBUG nova.compute.provider_tree [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.960658] env[61974]: INFO nova.scheduler.client.report [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Deleted allocations for instance 11d4f981-b167-4c81-9cd7-7e939606d400 [ 908.983047] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378990, 'name': ReconfigVM_Task, 'duration_secs': 0.51222} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.983332] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Reconfigured VM instance instance-0000003f to attach disk [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/a9edbd98-3e67-476b-934d-15d893a62d02.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.984035] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3da50294-bbb6-49ac-8997-c11e06f4164a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.991076] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 908.991076] env[61974]: value = "task-1378991" [ 908.991076] env[61974]: _type = "Task" [ 908.991076] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.998815] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378991, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.217934] env[61974]: DEBUG nova.network.neutron [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.444330] env[61974]: DEBUG nova.scheduler.client.report [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.471189] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2487462f-21c2-4cc6-a82a-5844b61a6827 tempest-ServersTestManualDisk-1461046518 tempest-ServersTestManualDisk-1461046518-project-member] Lock "11d4f981-b167-4c81-9cd7-7e939606d400" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.795s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.501857] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378991, 'name': Rename_Task, 'duration_secs': 0.146035} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.502158] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.502399] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7855b912-38a9-4c3d-bd25-ec420ff38ee4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.508601] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 909.508601] env[61974]: value = "task-1378992" [ 909.508601] env[61974]: _type = "Task" [ 909.508601] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.516387] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.723037] env[61974]: INFO nova.compute.manager [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] [instance: 0a62f878-43c1-4aaf-9054-798572b4faa7] Took 1.02 seconds to deallocate network for instance. [ 909.951406] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.952580] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.954707] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.771s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.977287] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 910.021609] env[61974]: DEBUG oslo_vmware.api [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378992, 'name': PowerOnVM_Task, 'duration_secs': 0.474402} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.022036] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.022738] env[61974]: INFO nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Took 7.79 seconds to spawn the instance on the hypervisor. 
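The Rename_Task / PowerOnVM_Task sequence above follows oslo.vmware's invoke-then-poll pattern: the driver starts a vCenter task through the API session, and wait_for_task() blocks while _poll_task logs the periodic "progress is N%" lines until the task completes. A minimal sketch of that pattern, assuming a `session` (an oslo_vmware.api.VMwareAPISession) and a `vm_ref` obtained elsewhere; the function and variable names here are illustrative, not Nova's code:

    from oslo_vmware import api

    def power_on_and_wait(session, vm_ref):
        """Start PowerOnVM_Task and block until vCenter reports completion."""
        # Produces the "Invoking VirtualMachine.PowerOnVM_Task" request line.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task, which is what emits the
        # "Task: {'id': 'task-...', 'name': 'PowerOnVM_Task'} progress is N%"
        # lines above, and raises if the task ends in error.
        return session.wait_for_task(task)

    # The session is normally built once per compute driver, e.g. (placeholder
    # credentials):
    # session = api.VMwareAPISession(host, username, password,
    #                                api_retry_count=10, task_poll_interval=0.5)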
[ 910.023065] env[61974]: DEBUG nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 910.024207] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f111e47-bf7c-40e8-b4a1-f72063fad9ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.459283] env[61974]: DEBUG nova.compute.utils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 910.465776] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 910.465776] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.497230] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.515016] env[61974]: DEBUG nova.policy [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5f6d80a0784b1f8ff2b2fcfbb44232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40e43abf62a5464091aa725e1cff2b50', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 910.545250] env[61974]: INFO nova.compute.manager [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Took 27.40 seconds to build instance. 
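The recurring "Acquiring lock \"compute_resources\"" / "acquired ... waited N s" / "released ... held N s" lines are emitted by oslo.concurrency's lock wrapper around the resource tracker's claim path. A minimal, hypothetical sketch of the same pattern (the body is a placeholder, not Nova's actual claim bookkeeping):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(node, instance):
        # While this body runs, any other claim or abort on the same host
        # blocks on the lock; the inner() wrapper in lockutils is what logs
        # the "waited 12.916s" / "held 2.373s" timings seen above.
        return {'node': node, 'instance': instance, 'claimed': True}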
[ 910.736415] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0493395e-5b6d-4f5d-9e82-03d14033bfe0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.745092] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283514a0-ad84-4a28-afac-190e407d9282 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.779652] env[61974]: INFO nova.scheduler.client.report [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Deleted allocations for instance 0a62f878-43c1-4aaf-9054-798572b4faa7 [ 910.785692] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c824c68-ec7d-4ae2-92a6-614ee116ac99 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.795567] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80659d2-76db-45b0-a0dc-e050b583f238 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.811660] env[61974]: DEBUG nova.compute.provider_tree [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.840758] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Successfully created port: e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.967495] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 911.048099] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c9d63e60-f5da-4eb5-ae61-a133a0d6913c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.161s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.291457] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dc676899-6b8b-4456-b1d9-6021b826381c tempest-ServerMetadataNegativeTestJSON-783151484 tempest-ServerMetadataNegativeTestJSON-783151484-project-member] Lock "0a62f878-43c1-4aaf-9054-798572b4faa7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.920s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.312647] env[61974]: DEBUG nova.scheduler.client.report [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.553418] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.576973] env[61974]: INFO nova.compute.manager [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Rescuing [ 911.578401] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.578401] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.578401] env[61974]: DEBUG nova.network.neutron [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.794297] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.819402] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.865s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.821275] env[61974]: ERROR nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. 
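The PortBindingFailed error just logged (and the traceback that follows) ends in nova.network.neutron._ensure_no_port_binding_failure, the frame at neutron.py:294 that raises the exception. A sketch of the check implied by that frame, assuming the standard Neutron `binding:vif_type` port field; the body is paraphrased for illustration, not copied from Nova:

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with
        # binding:vif_type = 'binding_failed'. When the updated port comes
        # back in that state, Nova raises PortBindingFailed, which surfaces
        # as the "Binding failed for port <uuid>, please check neutron logs"
        # errors above and causes the build to be re-scheduled.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])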
[ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Traceback (most recent call last): [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.driver.spawn(context, instance, image_meta, [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] vm_ref = self.build_virtual_machine(instance, [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] vif_infos = vmwarevif.get_vif_info(self._session, [ 911.821275] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] for vif in network_info: [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return self._sync_wrapper(fn, *args, **kwargs) [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.wait() [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self[:] = self._gt.wait() [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return self._exit_event.wait() [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] current.throw(*self._exc) [ 911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
911.821724] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] result = function(*args, **kwargs) [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] return func(*args, **kwargs) [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise e [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] nwinfo = self.network_api.allocate_for_instance( [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] created_port_ids = self._update_ports_for_instance( [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] with excutils.save_and_reraise_exception(): [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] self.force_reraise() [ 911.822042] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise self.value [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] updated_port = self._update_port( [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] _ensure_no_port_binding_failure(port) [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] raise exception.PortBindingFailed(port_id=port['id']) [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] nova.exception.PortBindingFailed: Binding failed for 
port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. [ 911.822341] env[61974]: ERROR nova.compute.manager [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] [ 911.822341] env[61974]: DEBUG nova.compute.utils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. {{(pid=61974) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.822583] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.983s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.824290] env[61974]: INFO nova.compute.claims [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.827048] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Build of instance 68794d97-95f7-4612-9f9f-e370afb3d852 was re-scheduled: Binding failed for port 494bb9c4-79d8-4ab3-b214-874dd1e3ceb5, please check neutron logs for more information. 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 911.827494] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Unplugging VIFs for instance {{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 911.827722] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.827872] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.828064] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.982692] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 912.017330] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 912.017622] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.017786] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 912.017968] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.019258] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 912.019516] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 912.019801] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 912.021367] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 912.021623] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Got 1 possible 
topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 912.021855] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 912.022140] env[61974]: DEBUG nova.virt.hardware [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 912.023402] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbef9218-938a-44b7-b777-e87d47a637e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.032848] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4791249a-0a8a-4dc4-b742-06be50969a4c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.082687] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.323517] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.384414] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.669831] env[61974]: DEBUG nova.network.neutron [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updating instance_info_cache with network_info: [{"id": "b3427315-9e46-4452-9a4b-43147b6cbc63", "address": "fa:16:3e:8c:21:6a", "network": {"id": "c7e214e9-6a9e-4dac-bc27-2bcd615ff93c", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1569316101-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fcd4aebfaf0f494c98cde099c7e28363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3427315-9e", "ovs_interfaceid": "b3427315-9e46-4452-9a4b-43147b6cbc63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.675640] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.794165] env[61974]: DEBUG nova.compute.manager [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Received event network-vif-plugged-e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.794463] env[61974]: DEBUG oslo_concurrency.lockutils [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] Acquiring lock "635f362a-582e-44bc-85d8-8a69943982b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.794733] env[61974]: DEBUG oslo_concurrency.lockutils [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] Lock "635f362a-582e-44bc-85d8-8a69943982b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.794967] env[61974]: DEBUG oslo_concurrency.lockutils [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] Lock "635f362a-582e-44bc-85d8-8a69943982b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.795214] env[61974]: DEBUG nova.compute.manager [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] No waiting events found dispatching network-vif-plugged-e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 912.795452] env[61974]: WARNING nova.compute.manager [req-35a1d814-f352-464d-8fab-b875b72d19de req-211f6284-4f76-46ce-bc92-4cf53c697e8f service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Received unexpected event network-vif-plugged-e143051e-56f9-4303-833b-6e0bda6b385a for instance with vm_state building and task_state spawning. [ 913.172190] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Successfully updated port: e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.173729] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "refresh_cache-a9edbd98-3e67-476b-934d-15d893a62d02" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.178476] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "refresh_cache-68794d97-95f7-4612-9f9f-e370afb3d852" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.179277] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61974) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 913.179277] env[61974]: DEBUG nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 913.179277] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.204208] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe70190-021f-44d0-b9ac-85cb787cfb40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.210877] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.216944] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d41b4bd-a74b-4543-9258-c6697b83d5e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.250366] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf2d969-34ac-4d17-9d0d-c4de7c39de40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.258234] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fa8c08-d8d4-4c52-b993-7267372d0d14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.274200] env[61974]: DEBUG nova.compute.provider_tree [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.678767] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.679023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.679088] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 
tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.714500] env[61974]: DEBUG nova.network.neutron [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.715803] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.716088] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe30acf7-61ee-49a3-8af9-bb26b9dc1448 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.723074] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 913.723074] env[61974]: value = "task-1378993" [ 913.723074] env[61974]: _type = "Task" [ 913.723074] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.734668] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.776969] env[61974]: DEBUG nova.scheduler.client.report [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 914.222355] env[61974]: INFO nova.compute.manager [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: 68794d97-95f7-4612-9f9f-e370afb3d852] Took 1.04 seconds to deallocate network for instance. [ 914.234588] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378993, 'name': PowerOffVM_Task, 'duration_secs': 0.203249} completed successfully. 
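The PowerOffVM_Task entries above follow the wait-for-task pattern visible throughout this log: submit a vSphere task, then poll it until it reports completion ("progress is 0%" ... "completed successfully"). Below is a minimal sketch of that polling loop; the FakeTask class, the wait_for_task helper, and the poll interval are illustrative stand-ins for this log, not oslo.vmware's actual API.

import time

class FakeTask:
    """Illustrative stand-in for a vSphere task handle (not oslo.vmware's API)."""
    def __init__(self, steps=3):
        self._steps = steps
        self._polls = 0

    def poll(self):
        # Each poll advances the fake task; a real driver would query the
        # vSphere API and read the task's state and progress instead.
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._steps))
        state = "success" if progress >= 100 else "running"
        return state, progress

def wait_for_task(task, interval=0.5, timeout=30.0):
    """Poll a task until it succeeds, mirroring the 'progress is N%' entries."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print(f"progress is {progress}%")
        if state == "success":
            return
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    wait_for_task(FakeTask(), interval=0.1)  # prints 33%, 66%, 100%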
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.235603] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.238861] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.239790] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f398ca-d207-4632-b939-873505198924 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.267131] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5667cf-2281-4f27-bb24-955481f8058b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.290250] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.291363] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.553s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.293124] env[61974]: INFO nova.compute.claims [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.306373] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.306373] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c47e2847-b8b6-42b1-b12c-942d485a7b89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.313777] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 914.313777] env[61974]: value = "task-1378994" [ 914.313777] env[61974]: _type = "Task" [ 914.313777] env[61974]: } to 
complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.325207] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] VM already powered off {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 914.326631] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.327435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.327435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.327435] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.327662] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b80cd24f-990c-4dcd-88e4-bc6ee7864146 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.337039] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.337039] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.337039] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85e6b987-7e0f-47ef-8cdb-db3ec89fee9f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.342406] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 914.342406] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5280c22b-0407-2cd4-ea39-2e7ad6f540fb" [ 914.342406] env[61974]: _type = "Task" [ 914.342406] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.351311] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5280c22b-0407-2cd4-ea39-2e7ad6f540fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.435627] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "450956aa-cc55-481c-acf6-287abc8b8efe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.436347] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.512046] env[61974]: DEBUG nova.network.neutron [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Updating instance_info_cache with network_info: [{"id": "e143051e-56f9-4303-833b-6e0bda6b385a", "address": "fa:16:3e:cd:ec:51", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape143051e-56", 
"ovs_interfaceid": "e143051e-56f9-4303-833b-6e0bda6b385a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.798838] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "200e39c2-f292-4336-83eb-07d689f44653" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.798838] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "200e39c2-f292-4336-83eb-07d689f44653" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.820630] env[61974]: DEBUG nova.compute.manager [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Received event network-changed-e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.821075] env[61974]: DEBUG nova.compute.manager [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Refreshing instance network info cache due to event network-changed-e143051e-56f9-4303-833b-6e0bda6b385a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 914.821075] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] Acquiring lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.824194] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "68ad5903-e502-406b-a19e-9e4c28aa5035" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.824443] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.852187] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5280c22b-0407-2cd4-ea39-2e7ad6f540fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009217} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.852946] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adbcae9c-a10c-48df-b2f5-6d62b522bf2f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.857796] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 914.857796] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522c900c-9199-a56a-ae67-04fc7898c645" [ 914.857796] env[61974]: _type = "Task" [ 914.857796] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.864618] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522c900c-9199-a56a-ae67-04fc7898c645, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.014377] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.014737] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance network_info: |[{"id": "e143051e-56f9-4303-833b-6e0bda6b385a", "address": "fa:16:3e:cd:ec:51", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape143051e-56", "ovs_interfaceid": "e143051e-56f9-4303-833b-6e0bda6b385a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 915.015121] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] Acquired lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.015313] env[61974]: DEBUG nova.network.neutron [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Refreshing network info cache for port e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 915.016672] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:ec:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e143051e-56f9-4303-833b-6e0bda6b385a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.024933] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating folder: Project (40e43abf62a5464091aa725e1cff2b50). 
Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.028799] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8dce52e9-a43c-4e50-8ab4-95a15eaf3224 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.040516] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created folder: Project (40e43abf62a5464091aa725e1cff2b50) in parent group-v292912. [ 915.040705] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating folder: Instances. Parent ref: group-v292939. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.040938] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0de04ba6-3bcc-4502-a529-f03a62fb3398 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.052049] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created folder: Instances in parent group-v292939. [ 915.052049] env[61974]: DEBUG oslo.service.loopingcall [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 915.052049] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.052229] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79f8c1ff-47af-4d19-b9e6-f6da64ec344a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.072808] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.072808] env[61974]: value = "task-1378997" [ 915.072808] env[61974]: _type = "Task" [ 915.072808] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.080268] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378997, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.240569] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.240880] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.249205] env[61974]: INFO nova.scheduler.client.report [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Deleted allocations for instance 68794d97-95f7-4612-9f9f-e370afb3d852 [ 915.263240] env[61974]: DEBUG nova.network.neutron [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Updated VIF entry in instance network info cache for port e143051e-56f9-4303-833b-6e0bda6b385a. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.264014] env[61974]: DEBUG nova.network.neutron [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Updating instance_info_cache with network_info: [{"id": "e143051e-56f9-4303-833b-6e0bda6b385a", "address": "fa:16:3e:cd:ec:51", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape143051e-56", "ovs_interfaceid": "e143051e-56f9-4303-833b-6e0bda6b385a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.303030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "200e39c2-f292-4336-83eb-07d689f44653" "released" by 
"nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.504s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.303313] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.368646] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522c900c-9199-a56a-ae67-04fc7898c645, 'name': SearchDatastore_Task, 'duration_secs': 0.008854} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.371442] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.371710] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8-rescue.vmdk. {{(pid=61974) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 915.372170] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3c59ee4-1565-46d7-8cc0-99e83568b7d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.379949] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 915.379949] env[61974]: value = "task-1378998" [ 915.379949] env[61974]: _type = "Task" [ 915.379949] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.390236] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378998, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.551410] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2f36d2-d3df-4f63-92cc-112e4ddf6d5e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.559624] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16528ef2-cd1d-4c5f-b248-5804c3a832e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.593311] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84073360-b034-46da-a288-e060dca4d775 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.602233] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378997, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.603427] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a18625-3fa9-4cbf-8304-1080134d67b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.616669] env[61974]: DEBUG nova.compute.provider_tree [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.757023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b63cdfb4-7ec8-4508-a12d-56fa30f842d0 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "68794d97-95f7-4612-9f9f-e370afb3d852" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.323s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.766095] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc995eee-4e70-40f2-936a-f5a1d186ed14 req-c4595aee-777e-4fa6-929b-3179307d4f5d service nova] Releasing lock "refresh_cache-635f362a-582e-44bc-85d8-8a69943982b0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.811064] env[61974]: DEBUG nova.compute.utils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 915.812364] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 915.812543] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 915.877532] env[61974]: DEBUG nova.policy [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20aa9af35e1b41d78498117b686b93ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fac25e52374f45d890ce85262657fa17', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 915.889106] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.103140] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378997, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.120577] env[61974]: DEBUG nova.scheduler.client.report [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.171442] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Successfully created port: 6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.259487] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 916.315934] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Start building block device mappings for instance. 
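The inventory payload reported above for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a determines how much the scheduler can place on this node: usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A quick check with the logged numbers, as a sketch:

# Inventory exactly as reported for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 178},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable {capacity:g}, single allocation capped at {inv['max_unit']}")

# VCPU: schedulable 192, single allocation capped at 16
# MEMORY_MB: schedulable 196078, single allocation capped at 65530
# DISK_GB: schedulable 400, single allocation capped at 178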
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.391166] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.601536] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1378997, 'name': CreateVM_Task, 'duration_secs': 1.384914} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.601729] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.602481] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.602651] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.603011] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 916.603293] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb774d5f-56c1-4317-ab23-da603f28ff4e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.608305] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 916.608305] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52bd8625-4a77-4c6e-3748-3c1c09393528" [ 916.608305] env[61974]: _type = "Task" [ 916.608305] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.617993] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bd8625-4a77-4c6e-3748-3c1c09393528, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.625566] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.626158] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 916.629112] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.051s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.630641] env[61974]: INFO nova.compute.claims [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.783496] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.891165] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378998, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.399241} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.891461] env[61974]: INFO nova.virt.vmwareapi.ds_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8-rescue.vmdk. 
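The compute_resources lock messages above (acquired after waiting 14.051s, then held 2.334s) show resource claims on this host being serialized through oslo_concurrency.lockutils. The following is a minimal sketch of that pattern, assuming only that the oslo.concurrency package is installed; the decorated function and the sleep are illustrative, not Nova's actual resource tracker.

import time
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid):
    # Only one claim runs at a time per process; concurrent callers queue up,
    # which is why the log shows long "waited" times during scheduling bursts.
    time.sleep(0.1)  # stand-in for checking and updating tracked resources
    return f"claimed resources for {instance_uuid}"

if __name__ == "__main__":
    print(instance_claim("f0601d26-4e29-4946-bb52-50e2a2163535"))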
[ 916.892229] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5c91a0-2c7d-4d7e-9f48-aab38a9a35c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.915646] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8-rescue.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.916212] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa1cb2fa-e453-4160-8f6d-28ed48f2dfba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.933252] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 916.933252] env[61974]: value = "task-1378999" [ 916.933252] env[61974]: _type = "Task" [ 916.933252] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.940868] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378999, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.120047] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bd8625-4a77-4c6e-3748-3c1c09393528, 'name': SearchDatastore_Task, 'duration_secs': 0.021619} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.120346] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.120622] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.120888] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.121087] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.121315] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.121598] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e07a6a90-0581-4322-a9a2-fda54954356a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.129452] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.129627] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.130348] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eaf0240-2479-43a8-8a79-8d472696aa3d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.134918] env[61974]: DEBUG nova.compute.utils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 917.139098] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 917.139292] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.140975] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 917.140975] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5233862e-bd52-407e-0a08-704f5282ec58" [ 917.140975] env[61974]: _type = "Task" [ 917.140975] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.151227] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5233862e-bd52-407e-0a08-704f5282ec58, 'name': SearchDatastore_Task, 'duration_secs': 0.008966} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.151484] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc3d4048-52ef-43ee-81e0-c18fc4b86fdc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.156893] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 917.156893] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523de601-ef1f-e857-5bf7-2216267d3393" [ 917.156893] env[61974]: _type = "Task" [ 917.156893] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.164534] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523de601-ef1f-e857-5bf7-2216267d3393, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.210113] env[61974]: DEBUG nova.policy [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6173db476e814cbaa6b3278cfa527bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dae05232e0041dba49b0432d64d82d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 917.324313] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.349687] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 917.349927] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.350098] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 917.350280] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.350494] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 917.350650] env[61974]: DEBUG nova.virt.hardware [None 
req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 917.350851] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 917.351012] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 917.351183] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 917.351345] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 917.351514] env[61974]: DEBUG nova.virt.hardware [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 917.352379] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4555cb-2e0a-4c8c-97a7-3918b2cd5f44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.359969] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863d6961-9a3b-413a-8db8-b99feed77b91 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.442280] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1378999, 'name': ReconfigVM_Task, 'duration_secs': 0.267771} completed successfully. 
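The nova.virt.hardware entries above walk the m1.nano flavor (1 vCPU, no flavor or image limits) through topology selection and arrive at the single candidate sockets=1, cores=1, threads=1. Below is a simplified sketch of that enumeration: it only factors the vCPU count against the limits and is not Nova's actual _get_possible_cpu_topologies.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Every (sockets, cores, threads) triple whose product equals the vCPU count."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the one topology in the log above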
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.442570] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Reconfigured VM instance instance-0000003f to attach disk [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8-rescue.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.443375] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9269b62-3b15-43a3-ab58-58f814972e9e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.467879] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3130984d-d14b-4004-a64e-1b9b34f8c3e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.483302] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 917.483302] env[61974]: value = "task-1379000" [ 917.483302] env[61974]: _type = "Task" [ 917.483302] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.491603] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.500043] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Successfully created port: 31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.639076] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 917.666869] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523de601-ef1f-e857-5bf7-2216267d3393, 'name': SearchDatastore_Task, 'duration_secs': 0.007985} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.667142] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.670022] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 635f362a-582e-44bc-85d8-8a69943982b0/635f362a-582e-44bc-85d8-8a69943982b0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.670022] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b919531e-1421-4cb5-aa2e-2021c14af416 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.674378] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 917.674378] env[61974]: value = "task-1379001" [ 917.674378] env[61974]: _type = "Task" [ 917.674378] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.685590] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379001, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.834130] env[61974]: DEBUG nova.compute.manager [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Received event network-vif-plugged-6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.834369] env[61974]: DEBUG oslo_concurrency.lockutils [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] Acquiring lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.834562] env[61974]: DEBUG oslo_concurrency.lockutils [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.834730] env[61974]: DEBUG oslo_concurrency.lockutils [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.834900] env[61974]: DEBUG nova.compute.manager [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] No waiting events found dispatching network-vif-plugged-6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 917.835078] env[61974]: WARNING nova.compute.manager [req-e6c184a2-0337-491c-b528-ea38f53c11e7 req-337522ae-9661-4a0e-9bf4-3a192559b7e0 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Received unexpected event network-vif-plugged-6198b979-646d-4f9b-bcd2-3dbcab269efd for instance with vm_state building and task_state spawning. [ 917.964814] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646d2d80-0900-4cd5-abbe-33fa3074c000 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.972974] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e7d559-34fc-4959-aa63-1308aa359832 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.010479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1931d674-0ffb-48ed-b205-d5dd430b927a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.018322] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379000, 'name': ReconfigVM_Task, 'duration_secs': 0.142439} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.020663] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.020979] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd6ba4c3-126a-46db-a28d-c5de65f1ddc9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.023624] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c234b41e-2ab8-4849-8a3b-3ba0ce7596a8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.030420] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Successfully updated port: 6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.040129] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 918.040129] env[61974]: value = "task-1379002" [ 918.040129] env[61974]: _type = "Task" [ 918.040129] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.040539] env[61974]: DEBUG nova.compute.provider_tree [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.052952] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379002, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.184558] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507458} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.185545] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 635f362a-582e-44bc-85d8-8a69943982b0/635f362a-582e-44bc-85d8-8a69943982b0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.185787] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.186106] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80932a35-b191-43c2-9eee-1871c244a833 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.192922] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 918.192922] env[61974]: value = "task-1379003" [ 918.192922] env[61974]: _type = "Task" [ 918.192922] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.201303] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379003, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.544661] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.544661] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquired lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.544661] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.552696] env[61974]: DEBUG nova.scheduler.client.report [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.565781] env[61974]: DEBUG oslo_vmware.api [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379002, 'name': PowerOnVM_Task, 'duration_secs': 0.456359} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.566205] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.568911] env[61974]: DEBUG nova.compute.manager [None req-2f6db866-ea5f-4d55-9929-50f45d9a2d5c tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 918.569718] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90811964-fd1e-4977-a280-f768f9d045eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.652033] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 918.690736] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.691957] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.691957] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.691957] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.691957] env[61974]: DEBUG nova.virt.hardware [None 
req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.691957] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.692235] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.692235] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.692235] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.692365] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.692562] env[61974]: DEBUG nova.virt.hardware [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.693781] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801e2172-0d28-44a3-9425-78e0c7c3e704 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.707012] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeedbb60-fce2-4a64-bd0a-ef11886277a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.711385] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06758} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.711687] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.712886] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d259eee3-387b-4e4a-a1b6-0e8e9d0359bb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.745681] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 635f362a-582e-44bc-85d8-8a69943982b0/635f362a-582e-44bc-85d8-8a69943982b0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.746361] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b82ba0f5-12c2-4d9a-8d56-883f00a24869 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.766000] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 918.766000] env[61974]: value = "task-1379004" [ 918.766000] env[61974]: _type = "Task" [ 918.766000] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.774363] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379004, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.060216] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.060755] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 919.063427] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.217s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.064844] env[61974]: INFO nova.compute.claims [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.115274] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.278706] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.343442] env[61974]: DEBUG nova.network.neutron [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Updating instance_info_cache with network_info: [{"id": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "address": "fa:16:3e:b3:a4:a2", "network": {"id": "569ec175-8e13-48a8-9c60-6db2b79bea7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316482843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fac25e52374f45d890ce85262657fa17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6198b979-64", "ovs_interfaceid": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.439480] env[61974]: DEBUG nova.compute.manager [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Received event network-vif-plugged-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.439800] env[61974]: DEBUG oslo_concurrency.lockutils [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 
req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] Acquiring lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.439912] env[61974]: DEBUG oslo_concurrency.lockutils [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] Lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.440090] env[61974]: DEBUG oslo_concurrency.lockutils [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] Lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.440369] env[61974]: DEBUG nova.compute.manager [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] No waiting events found dispatching network-vif-plugged-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 919.440441] env[61974]: WARNING nova.compute.manager [req-a622f9cf-3681-4aa7-8654-7a286e12fb65 req-4e2993b8-c4f5-4525-b38e-bf9be4aa1638 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Received unexpected event network-vif-plugged-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 for instance with vm_state building and task_state spawning. [ 919.570620] env[61974]: DEBUG nova.compute.utils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 919.574381] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 919.574381] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.629757] env[61974]: DEBUG nova.policy [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '453e22de6c0f478d93d6269ea122d660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61c671d85b64b28872586c2816b83f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.776534] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.847070] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Releasing lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.847070] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Instance network_info: |[{"id": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "address": "fa:16:3e:b3:a4:a2", "network": {"id": "569ec175-8e13-48a8-9c60-6db2b79bea7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316482843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fac25e52374f45d890ce85262657fa17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6198b979-64", "ovs_interfaceid": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 919.847293] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 
tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:a4:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6198b979-646d-4f9b-bcd2-3dbcab269efd', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.857016] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Creating folder: Project (fac25e52374f45d890ce85262657fa17). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 919.857016] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0eecff4-03b1-4fb0-8cff-dd2e6dba1832 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.864136] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Created folder: Project (fac25e52374f45d890ce85262657fa17) in parent group-v292912. [ 919.864304] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Creating folder: Instances. Parent ref: group-v292942. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 919.864631] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfdb0066-07a3-4379-99f0-f39fbe4ca684 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.873666] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Created folder: Instances in parent group-v292942. [ 919.873858] env[61974]: DEBUG oslo.service.loopingcall [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.875036] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.876211] env[61974]: DEBUG nova.compute.manager [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Received event network-changed-6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.876559] env[61974]: DEBUG nova.compute.manager [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Refreshing instance network info cache due to event network-changed-6198b979-646d-4f9b-bcd2-3dbcab269efd. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.876826] env[61974]: DEBUG oslo_concurrency.lockutils [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] Acquiring lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.877025] env[61974]: DEBUG oslo_concurrency.lockutils [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] Acquired lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.877239] env[61974]: DEBUG nova.network.neutron [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Refreshing network info cache for port 6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.878533] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0736e456-0735-45f0-a32d-42b0cc8803d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.900673] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.900673] env[61974]: value = "task-1379007" [ 919.900673] env[61974]: _type = "Task" [ 919.900673] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.901502] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Successfully updated port: 31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.916845] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379007, 'name': CreateVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.075443] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 920.090073] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Successfully created port: f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.276800] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379004, 'name': ReconfigVM_Task, 'duration_secs': 1.089487} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.279456] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 635f362a-582e-44bc-85d8-8a69943982b0/635f362a-582e-44bc-85d8-8a69943982b0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 920.280985] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a76946f7-e156-49ad-977c-17939af95ae9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.287187] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 920.287187] env[61974]: value = "task-1379008" [ 920.287187] env[61974]: _type = "Task" [ 920.287187] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.297860] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379008, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.395219] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ec41aa-a79c-45ce-8988-1bf08c9fdf3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.402963] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960c86fc-9c28-43b7-a647-8c968b15b20d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.411333] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.411479] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.411614] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.441879] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f87b12-510d-43cc-9ac2-d61ef0652f3d {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.450471] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379007, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.454841] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a774ca-38e1-41e6-ad2c-1aa91b2ae25c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.469465] env[61974]: DEBUG nova.compute.provider_tree [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.484758] env[61974]: DEBUG nova.network.neutron [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Updated VIF entry in instance network info cache for port 6198b979-646d-4f9b-bcd2-3dbcab269efd. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.485131] env[61974]: DEBUG nova.network.neutron [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Updating instance_info_cache with network_info: [{"id": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "address": "fa:16:3e:b3:a4:a2", "network": {"id": "569ec175-8e13-48a8-9c60-6db2b79bea7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1316482843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fac25e52374f45d890ce85262657fa17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6198b979-64", "ovs_interfaceid": "6198b979-646d-4f9b-bcd2-3dbcab269efd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.800425] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379008, 'name': Rename_Task, 'duration_secs': 0.154295} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.800779] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.801049] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c8b4015-9216-46aa-a74b-801080c455ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.807713] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 920.807713] env[61974]: value = "task-1379009" [ 920.807713] env[61974]: _type = "Task" [ 920.807713] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.816531] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379009, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.919711] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379007, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.974315] env[61974]: DEBUG nova.scheduler.client.report [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.978043] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.988250] env[61974]: DEBUG oslo_concurrency.lockutils [req-fa1ce30b-4c7b-4ce9-9d1d-a0b5f9319354 req-67e06e3d-73b0-4c15-a089-874321c6f301 service nova] Releasing lock "refresh_cache-f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.091728] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 921.121201] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.121454] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.121613] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.121790] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.121934] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.122093] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.122305] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.122466] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.122629] env[61974]: DEBUG 
nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.122786] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.122960] env[61974]: DEBUG nova.virt.hardware [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.123862] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41eb24c7-e41b-4531-a815-94371798ccab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.134359] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3acf45b-9121-470f-af0a-98159c1f721c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.163214] env[61974]: DEBUG nova.network.neutron [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Updating instance_info_cache with network_info: [{"id": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "address": "fa:16:3e:7f:c8:b0", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fec0d1-b5", "ovs_interfaceid": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.319388] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379009, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.373245] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "a9edbd98-3e67-476b-934d-15d893a62d02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.373609] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.373908] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.374134] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.374421] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.376712] env[61974]: INFO nova.compute.manager [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Terminating instance [ 921.378976] env[61974]: DEBUG nova.compute.manager [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 921.379243] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.380293] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9d08b6-dc10-4e1c-8b11-5048e4189ad9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.388206] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.388477] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce5531d8-24a0-44ea-a854-5029ff93762c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.395890] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 921.395890] env[61974]: value = "task-1379010" [ 921.395890] env[61974]: _type = "Task" [ 921.395890] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.404532] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379010, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.417484] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379007, 'name': CreateVM_Task, 'duration_secs': 1.216805} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.417684] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.418454] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.418618] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.418979] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 921.419274] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a207a16e-6b45-4709-821c-8be9fc6cca23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.424033] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 921.424033] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52409584-8caa-7fe6-78bc-a44a099ccd0e" [ 921.424033] env[61974]: _type = "Task" [ 921.424033] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.432243] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52409584-8caa-7fe6-78bc-a44a099ccd0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.481934] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.482485] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 921.485127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.844s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.486531] env[61974]: INFO nova.compute.claims [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.666877] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.667501] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance network_info: |[{"id": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "address": "fa:16:3e:7f:c8:b0", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fec0d1-b5", "ovs_interfaceid": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 921.667672] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:c8:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31fec0d1-b5b6-4c0c-ba81-11fad03cfa19', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.679816] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 
tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating folder: Project (7dae05232e0041dba49b0432d64d82d2). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.680259] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff8ff6b8-b1ad-41c5-8ead-2b0c5b50600d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.692488] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created folder: Project (7dae05232e0041dba49b0432d64d82d2) in parent group-v292912. [ 921.692725] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating folder: Instances. Parent ref: group-v292945. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.692930] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da04bb1d-4532-4bde-b26e-18064c5c353a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.703404] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created folder: Instances in parent group-v292945. [ 921.703635] env[61974]: DEBUG oslo.service.loopingcall [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 921.703823] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.704101] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9958503-e8e4-4a58-9f83-762787ed7de3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.721509] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.721509] env[61974]: value = "task-1379013" [ 921.721509] env[61974]: _type = "Task" [ 921.721509] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.728834] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379013, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.819858] env[61974]: DEBUG oslo_vmware.api [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379009, 'name': PowerOnVM_Task, 'duration_secs': 0.662531} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.819858] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.819858] env[61974]: INFO nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Took 9.84 seconds to spawn the instance on the hypervisor. [ 921.820145] env[61974]: DEBUG nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 921.820877] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4359cb67-2972-4735-91e9-5238780bb5f2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.907996] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379010, 'name': PowerOffVM_Task, 'duration_secs': 0.294434} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.908483] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.908796] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.909210] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4c8e880-c568-4099-8260-969d82006194 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.921791] env[61974]: DEBUG nova.compute.manager [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Received event network-changed-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.921959] env[61974]: DEBUG nova.compute.manager [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Refreshing instance network info cache due to event network-changed-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 921.922208] env[61974]: DEBUG oslo_concurrency.lockutils [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] Acquiring lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.922367] env[61974]: DEBUG oslo_concurrency.lockutils [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] Acquired lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.922540] env[61974]: DEBUG nova.network.neutron [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Refreshing network info cache for port 31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.924287] env[61974]: DEBUG nova.compute.manager [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Received event network-vif-plugged-f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.924508] env[61974]: DEBUG oslo_concurrency.lockutils [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.924718] env[61974]: DEBUG oslo_concurrency.lockutils [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.924896] env[61974]: DEBUG oslo_concurrency.lockutils [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.925088] env[61974]: DEBUG nova.compute.manager [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] No waiting events found dispatching network-vif-plugged-f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 921.925320] env[61974]: WARNING nova.compute.manager [req-31895011-0463-434a-aeb1-51e5ccad46ab req-54b870e7-74c7-4c74-8563-60ebcae87672 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Received unexpected event network-vif-plugged-f76d592c-5eee-4379-b971-9896eb2bb538 for instance with vm_state building and task_state spawning. 
[ 921.937243] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52409584-8caa-7fe6-78bc-a44a099ccd0e, 'name': SearchDatastore_Task, 'duration_secs': 0.013845} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.937334] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.937730] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.938150] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.938429] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.938700] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.939365] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a3f37ef-923b-4962-a68a-0cffebf2a6b6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.949637] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.949637] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.949878] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f367f0-a428-454b-b6c3-ea1b6a180678 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.955179] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 921.955179] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52488e74-d37d-0987-3b46-72842258ce1e" [ 921.955179] env[61974]: _type = "Task" [ 921.955179] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.964352] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52488e74-d37d-0987-3b46-72842258ce1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.971477] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.971718] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.971948] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Deleting the datastore file [datastore2] a9edbd98-3e67-476b-934d-15d893a62d02 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.972329] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cd4b091-ddf2-49f1-b18a-68533d4b7927 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.980457] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for the task: (returnval){ [ 921.980457] env[61974]: value = "task-1379015" [ 921.980457] env[61974]: _type = "Task" [ 921.980457] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.987049] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379015, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.990636] env[61974]: DEBUG nova.compute.utils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 921.994465] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 921.994806] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.042241] env[61974]: DEBUG nova.policy [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 922.231556] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379013, 'name': CreateVM_Task, 'duration_secs': 0.36505} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.231835] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.232437] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.232793] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.232913] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 922.233205] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-495c3eb3-02dd-491a-bdb2-93cd93b4155c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.237903] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 922.237903] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52df9202-a041-8fd4-6fe9-0ba2ec46b0e4" [ 922.237903] env[61974]: _type = "Task" [ 922.237903] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.253156] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52df9202-a041-8fd4-6fe9-0ba2ec46b0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009118} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.253355] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.253597] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.253800] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.340462] env[61974]: INFO nova.compute.manager [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Took 27.70 seconds to build instance. [ 922.384359] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Successfully created port: 0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.466069] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52488e74-d37d-0987-3b46-72842258ce1e, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.466949] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6178d10-05a0-4827-b2df-6c593aa3a639 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.472614] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 922.472614] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52817d6a-3b4c-b3cc-8afc-eea7c064e544" [ 922.472614] env[61974]: _type = "Task" [ 922.472614] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.480241] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52817d6a-3b4c-b3cc-8afc-eea7c064e544, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.489497] env[61974]: DEBUG oslo_vmware.api [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Task: {'id': task-1379015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310748} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.489745] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.489961] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.490249] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.490463] env[61974]: INFO nova.compute.manager [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Took 1.11 seconds to destroy the instance on the hypervisor. [ 922.490720] env[61974]: DEBUG oslo.service.loopingcall [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.490913] env[61974]: DEBUG nova.compute.manager [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 922.491016] env[61974]: DEBUG nova.network.neutron [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.495562] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 922.511192] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Successfully updated port: f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.837126] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f251fd82-7aa6-403a-a8cd-07205bbc467f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.846597] env[61974]: DEBUG oslo_concurrency.lockutils [None req-342b4a09-0dfc-401f-b25b-d659e607d0c9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.434s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.846597] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a68488c-3e34-43d3-acf7-2786d39ffe7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.850424] env[61974]: DEBUG nova.network.neutron [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Updated VIF entry in instance network info cache for port 31fec0d1-b5b6-4c0c-ba81-11fad03cfa19. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.850737] env[61974]: DEBUG nova.network.neutron [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Updating instance_info_cache with network_info: [{"id": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "address": "fa:16:3e:7f:c8:b0", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fec0d1-b5", "ovs_interfaceid": "31fec0d1-b5b6-4c0c-ba81-11fad03cfa19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.882339] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260d82ba-aeca-42df-8464-37b122fe6dc6 
{{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.890309] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e42be4e-4985-418b-9246-d5e9ab6976bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.898017] env[61974]: DEBUG nova.compute.manager [req-3c2c95db-72aa-475d-ac89-c12dd2b21b21 req-13cd8d21-895e-4eb2-bf44-8c1f5232094d service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Received event network-vif-deleted-b3427315-9e46-4452-9a4b-43147b6cbc63 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 922.898017] env[61974]: INFO nova.compute.manager [req-3c2c95db-72aa-475d-ac89-c12dd2b21b21 req-13cd8d21-895e-4eb2-bf44-8c1f5232094d service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Neutron deleted interface b3427315-9e46-4452-9a4b-43147b6cbc63; detaching it from the instance and deleting it from the info cache [ 922.898249] env[61974]: DEBUG nova.network.neutron [req-3c2c95db-72aa-475d-ac89-c12dd2b21b21 req-13cd8d21-895e-4eb2-bf44-8c1f5232094d service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.911645] env[61974]: DEBUG nova.compute.provider_tree [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.913257] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c458dea-dbe6-4dc7-99aa-6b4c9b2d7f58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.922569] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733b74e1-8399-4205-82a1-b6f38ef448e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.946285] env[61974]: DEBUG nova.compute.manager [req-3c2c95db-72aa-475d-ac89-c12dd2b21b21 req-13cd8d21-895e-4eb2-bf44-8c1f5232094d service nova] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Detach interface failed, port_id=b3427315-9e46-4452-9a4b-43147b6cbc63, reason: Instance a9edbd98-3e67-476b-934d-15d893a62d02 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 922.983156] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52817d6a-3b4c-b3cc-8afc-eea7c064e544, 'name': SearchDatastore_Task, 'duration_secs': 0.011647} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.983438] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.983693] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f88f0ef2-24f2-4eef-92a3-8de2ebb6944a/f88f0ef2-24f2-4eef-92a3-8de2ebb6944a.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.983967] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.984168] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.984385] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa406e18-261c-42c5-ae90-72e8b0c8b844 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.987585] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04eebb03-b685-4798-a96a-ebeadcd77594 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.998934] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 922.998934] env[61974]: value = "task-1379016" [ 922.998934] env[61974]: _type = "Task" [ 922.998934] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.998934] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.998934] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.003736] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d168651-e1b6-4b0c-b292-14c939d72392 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.012998] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 923.012998] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52ecfaf5-3471-d2d1-ff1a-4ec76a84a022" [ 923.012998] env[61974]: _type = "Task" [ 923.012998] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.016582] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.016719] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.016868] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.017858] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.026471] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ecfaf5-3471-d2d1-ff1a-4ec76a84a022, 'name': SearchDatastore_Task, 'duration_secs': 0.008121} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.027246] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa44176d-b80f-4859-8d3f-13fc19be3655 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.033162] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 923.033162] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5246255c-2b2d-9436-0601-9800f9accb96" [ 923.033162] env[61974]: _type = "Task" [ 923.033162] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.044501] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246255c-2b2d-9436-0601-9800f9accb96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.232120] env[61974]: DEBUG nova.network.neutron [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.355707] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 923.359029] env[61974]: DEBUG oslo_concurrency.lockutils [req-ce675359-c548-4b64-9c39-aeb0957649ad req-f2218543-6684-40a5-bd22-6a2c7be38fb3 service nova] Releasing lock "refresh_cache-f0601d26-4e29-4946-bb52-50e2a2163535" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.417976] env[61974]: DEBUG nova.scheduler.client.report [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.497370] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6592f59d-bd4a-4a35-a257-611bbc096478 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.510434] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379016, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.512435] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Suspending the VM {{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 923.512734] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9a38ea9b-d6f8-458e-aa3d-07e9ee5825b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.520425] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 923.526267] env[61974]: DEBUG oslo_vmware.api [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 923.526267] env[61974]: value = "task-1379017" [ 923.526267] env[61974]: _type = "Task" [ 923.526267] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.537455] env[61974]: DEBUG oslo_vmware.api [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379017, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.547905] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246255c-2b2d-9436-0601-9800f9accb96, 'name': SearchDatastore_Task, 'duration_secs': 0.0082} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.548316] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.548608] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.548908] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c382808-4949-41eb-8bdf-45dcbb5cd796 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.554795] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.555133] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.555305] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.555494] env[61974]: DEBUG nova.virt.hardware 
[None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.555642] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.555827] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.556082] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.556254] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.556424] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.556645] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.556939] env[61974]: DEBUG nova.virt.hardware [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.557864] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c82aa07-5229-4c51-803c-8fd4f0685788 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.562796] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.565087] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 923.565087] env[61974]: value = "task-1379018" [ 923.565087] env[61974]: _type = "Task" [ 923.565087] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.574294] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811ffb1e-30a0-4ba9-ad67-05753fd462e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.582499] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379018, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.735810] env[61974]: INFO nova.compute.manager [-] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Took 1.24 seconds to deallocate network for instance. [ 923.748019] env[61974]: DEBUG nova.network.neutron [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating instance_info_cache with network_info: [{"id": "f76d592c-5eee-4379-b971-9896eb2bb538", "address": "fa:16:3e:d0:c9:51", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76d592c-5e", "ovs_interfaceid": "f76d592c-5eee-4379-b971-9896eb2bb538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.879565] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.923324] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.923927] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 923.926984] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.430s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.928768] env[61974]: INFO nova.compute.claims [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.951246] env[61974]: DEBUG nova.compute.manager [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Received event network-changed-f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 923.951461] env[61974]: DEBUG nova.compute.manager [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Refreshing instance network info cache due to event network-changed-f76d592c-5eee-4379-b971-9896eb2bb538. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 923.951660] env[61974]: DEBUG oslo_concurrency.lockutils [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] Acquiring lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.011883] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632603} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.012181] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f88f0ef2-24f2-4eef-92a3-8de2ebb6944a/f88f0ef2-24f2-4eef-92a3-8de2ebb6944a.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.012394] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.012667] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5852166a-0ae0-4301-96b8-348bf4311fb4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.018877] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 924.018877] env[61974]: value = "task-1379019" [ 924.018877] env[61974]: _type = "Task" [ 924.018877] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.028260] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379019, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.036215] env[61974]: DEBUG oslo_vmware.api [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379017, 'name': SuspendVM_Task} progress is 70%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.076813] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379018, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.243637] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.250508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.250877] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Instance network_info: |[{"id": "f76d592c-5eee-4379-b971-9896eb2bb538", "address": "fa:16:3e:d0:c9:51", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76d592c-5e", "ovs_interfaceid": "f76d592c-5eee-4379-b971-9896eb2bb538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 924.251325] env[61974]: DEBUG oslo_concurrency.lockutils [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] Acquired lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.251511] env[61974]: DEBUG nova.network.neutron [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Refreshing network info cache for port f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.253151] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:c9:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f76d592c-5eee-4379-b971-9896eb2bb538', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.261362] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating folder: Project (c61c671d85b64b28872586c2816b83f8). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.262619] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3b126d9-c69a-4024-b4fd-6a968b3b7287 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.274533] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created folder: Project (c61c671d85b64b28872586c2816b83f8) in parent group-v292912. [ 924.274800] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating folder: Instances. Parent ref: group-v292948. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 924.275365] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90c63850-12c3-4af6-a562-f20990ac72a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.284728] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created folder: Instances in parent group-v292948. [ 924.285042] env[61974]: DEBUG oslo.service.loopingcall [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.285282] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.285533] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f479780-138c-43e5-a21d-f7048cd109c0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.310386] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.310386] env[61974]: value = "task-1379022" [ 924.310386] env[61974]: _type = "Task" [ 924.310386] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.322092] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379022, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.322417] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Successfully updated port: 0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.434372] env[61974]: DEBUG nova.compute.utils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.437691] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 924.437820] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 924.494585] env[61974]: DEBUG nova.policy [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d000dbe94f14f7296a630ae8c8f1353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a1a2f7a8ac448ca8d5e0306eefb1d97', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 924.529948] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379019, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128298} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.532751] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.533871] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9532d7-45ab-4545-a3f1-f49b36cf8a75 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.544150] env[61974]: DEBUG oslo_vmware.api [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379017, 'name': SuspendVM_Task, 'duration_secs': 0.939958} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.552763] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Suspended the VM {{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 924.553143] env[61974]: DEBUG nova.compute.manager [None req-5403d715-5597-488a-b5f5-9d875ed6c7fa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 924.561938] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] f88f0ef2-24f2-4eef-92a3-8de2ebb6944a/f88f0ef2-24f2-4eef-92a3-8de2ebb6944a.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.565048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2bd1dc-b8c5-4402-82ed-70e9f4a48a06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.566517] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0853feb1-cbd9-4058-9e2a-53dadc70a9cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.592343] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 924.592343] env[61974]: value = "task-1379023" [ 924.592343] env[61974]: _type = "Task" [ 924.592343] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.603923] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379023, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.606150] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785828} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.606150] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.606150] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.606150] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39bd3f91-3176-4f13-bb9e-d985aa5fa79a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.612249] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 924.612249] env[61974]: value = "task-1379024" [ 924.612249] env[61974]: _type = "Task" [ 924.612249] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.623438] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.819795] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379022, 'name': CreateVM_Task, 'duration_secs': 0.402767} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.820091] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.820830] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.821070] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.821451] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 924.821715] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55f77a0c-78ee-4805-90f8-ceb44e40e5e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.825054] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.825201] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.825365] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 924.827524] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 924.827524] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520b4a6e-867f-3648-fe7d-d9027adcb15b" [ 924.827524] env[61974]: _type = "Task" [ 924.827524] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.836731] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b4a6e-867f-3648-fe7d-d9027adcb15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.848415] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Successfully created port: 7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 924.938458] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 925.093293] env[61974]: DEBUG nova.network.neutron [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updated VIF entry in instance network info cache for port f76d592c-5eee-4379-b971-9896eb2bb538. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.093670] env[61974]: DEBUG nova.network.neutron [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating instance_info_cache with network_info: [{"id": "f76d592c-5eee-4379-b971-9896eb2bb538", "address": "fa:16:3e:d0:c9:51", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76d592c-5e", "ovs_interfaceid": "f76d592c-5eee-4379-b971-9896eb2bb538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.110861] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379023, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.124953] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086432} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.125404] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.125937] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89967035-5ed7-4a2c-9b27-7b837da31993 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.150502] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.153136] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba11f1b7-a885-452c-88ae-f15c847428c0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.173438] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 925.173438] env[61974]: value = "task-1379025" [ 925.173438] env[61974]: _type = "Task" [ 925.173438] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.184383] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379025, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.252456] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd41148-5e7f-48ee-9a3c-a77232928455 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.259940] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69517b1e-cb27-4faf-ad71-4174670db846 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.291931] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41002153-947b-4d8a-98e2-bed6950cb40a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.299947] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3635cc-bcf7-4de8-a02c-3407181b4b54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.313871] env[61974]: DEBUG nova.compute.provider_tree [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.339979] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b4a6e-867f-3648-fe7d-d9027adcb15b, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.340528] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.340783] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.341032] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.341189] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.341372] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.341642] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6adf6f6e-3241-4a21-a2d2-5698f7610f54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.351012] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.351210] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.352016] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-393cdf87-b6a9-4c62-a2d9-4ce5e755bd20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.357332] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 925.357332] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52da5459-f105-3eb8-1908-93e9aad27886" [ 925.357332] env[61974]: _type = "Task" [ 925.357332] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.361331] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.367412] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52da5459-f105-3eb8-1908-93e9aad27886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.562269] env[61974]: DEBUG nova.network.neutron [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Updating instance_info_cache with network_info: [{"id": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "address": "fa:16:3e:a3:2a:39", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e8f6fb0-8b", "ovs_interfaceid": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.598131] env[61974]: DEBUG oslo_concurrency.lockutils [req-536a10c8-84ae-47ae-9af8-79996bd4cd06 req-642f1878-2b37-4174-85b8-df1870463ce0 service nova] Releasing lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.607952] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379023, 'name': ReconfigVM_Task, 'duration_secs': 0.625989} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.608276] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Reconfigured VM instance instance-00000041 to attach disk [datastore1] f88f0ef2-24f2-4eef-92a3-8de2ebb6944a/f88f0ef2-24f2-4eef-92a3-8de2ebb6944a.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.609220] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d35b0b54-7591-43cd-8c93-2347f7f5d262 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.615436] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 925.615436] env[61974]: value = "task-1379026" [ 925.615436] env[61974]: _type = "Task" [ 925.615436] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.623365] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379026, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.683202] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379025, 'name': ReconfigVM_Task, 'duration_secs': 0.333643} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.683481] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Reconfigured VM instance instance-00000042 to attach disk [datastore1] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.684155] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f8b379c-9f6a-4905-ac04-1d5a368cd187 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.691104] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 925.691104] env[61974]: value = "task-1379027" [ 925.691104] env[61974]: _type = "Task" [ 925.691104] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.702514] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379027, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.817462] env[61974]: DEBUG nova.scheduler.client.report [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.868422] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52da5459-f105-3eb8-1908-93e9aad27886, 'name': SearchDatastore_Task, 'duration_secs': 0.010108} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.869731] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c0a17a-cbeb-4822-beb1-e1a7587b0d12 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.875492] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 925.875492] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52ed9e45-e589-fbb2-ae64-212907e85f0f" [ 925.875492] env[61974]: _type = "Task" [ 925.875492] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.884794] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ed9e45-e589-fbb2-ae64-212907e85f0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.951686] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 925.976671] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 925.976926] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.977107] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 925.977296] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 
tempest-AttachVolumeTestJSON-1601427499-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.977440] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 925.977585] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 925.977791] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 925.977949] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 925.978180] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 925.978360] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 925.978559] env[61974]: DEBUG nova.virt.hardware [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 925.979410] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8932c5c0-e889-41d6-9525-99484769a101 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.987688] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff9d6a7-8726-456b-aa60-4c42788c152b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.003209] env[61974]: DEBUG nova.compute.manager [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Received event network-vif-plugged-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.003467] env[61974]: DEBUG oslo_concurrency.lockutils 
[req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.003681] env[61974]: DEBUG oslo_concurrency.lockutils [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.003850] env[61974]: DEBUG oslo_concurrency.lockutils [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.004036] env[61974]: DEBUG nova.compute.manager [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] No waiting events found dispatching network-vif-plugged-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 926.004212] env[61974]: WARNING nova.compute.manager [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Received unexpected event network-vif-plugged-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 for instance with vm_state building and task_state spawning. [ 926.004377] env[61974]: DEBUG nova.compute.manager [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Received event network-changed-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.004530] env[61974]: DEBUG nova.compute.manager [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Refreshing instance network info cache due to event network-changed-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.004694] env[61974]: DEBUG oslo_concurrency.lockutils [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Acquiring lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.065952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.066308] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Instance network_info: |[{"id": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "address": "fa:16:3e:a3:2a:39", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e8f6fb0-8b", "ovs_interfaceid": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 926.066604] env[61974]: DEBUG oslo_concurrency.lockutils [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Acquired lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.066785] env[61974]: DEBUG nova.network.neutron [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Refreshing network info cache for port 0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 926.068052] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:2a:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e8f6fb0-8b0e-4320-b636-cd468b8bfda6', 
'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.075774] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating folder: Project (d104a741ebad47748ae5646356589fce). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.077275] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11fe457-c541-4579-994b-dfde1aeb709c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.089962] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created folder: Project (d104a741ebad47748ae5646356589fce) in parent group-v292912. [ 926.089962] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating folder: Instances. Parent ref: group-v292951. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 926.090366] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee792f76-ed75-4213-b49d-4ae637756195 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.099876] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created folder: Instances in parent group-v292951. [ 926.100133] env[61974]: DEBUG oslo.service.loopingcall [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.100326] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.100523] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1760e2a5-9f14-4b36-a1af-1d6dd1624649 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.124904] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379026, 'name': Rename_Task, 'duration_secs': 0.195256} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.126114] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.126344] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.126344] env[61974]: value = "task-1379030" [ 926.126344] env[61974]: _type = "Task" [ 926.126344] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.126514] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5104174c-cb78-4a86-adce-30a186715851 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.135304] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379030, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.136392] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 926.136392] env[61974]: value = "task-1379031" [ 926.136392] env[61974]: _type = "Task" [ 926.136392] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.143315] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379031, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.201034] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379027, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.322253] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.322836] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 926.325623] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.243s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.328028] env[61974]: INFO nova.compute.claims [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.386825] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ed9e45-e589-fbb2-ae64-212907e85f0f, 'name': SearchDatastore_Task, 'duration_secs': 0.011844} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.387184] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.387491] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1c1404fd-a954-4849-883b-7898a7e87e2b/1c1404fd-a954-4849-883b-7898a7e87e2b.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.387809] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f77cb5b4-bcb1-4da0-ba7c-ba3595bf7e57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.396617] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 926.396617] env[61974]: value = "task-1379032" [ 926.396617] env[61974]: _type = "Task" [ 926.396617] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.406743] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379032, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.555503] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Successfully updated port: 7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.638472] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379030, 'name': CreateVM_Task, 'duration_secs': 0.371633} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.641748] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.642969] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.643189] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.643608] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 926.644419] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6203a2db-a374-45d6-9e84-fab56a4c708d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.650051] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379031, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.653503] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 926.653503] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]525dbead-42c5-9a19-db3a-0e7b7e539dbd" [ 926.653503] env[61974]: _type = "Task" [ 926.653503] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.665243] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525dbead-42c5-9a19-db3a-0e7b7e539dbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.701931] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379027, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.833269] env[61974]: DEBUG nova.compute.utils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.837227] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 926.868589] env[61974]: DEBUG nova.network.neutron [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Updated VIF entry in instance network info cache for port 0e8f6fb0-8b0e-4320-b636-cd468b8bfda6. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 926.868589] env[61974]: DEBUG nova.network.neutron [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Updating instance_info_cache with network_info: [{"id": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "address": "fa:16:3e:a3:2a:39", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e8f6fb0-8b", "ovs_interfaceid": "0e8f6fb0-8b0e-4320-b636-cd468b8bfda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.906856] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379032, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.058409] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.058559] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.058716] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.095379] env[61974]: DEBUG nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.096365] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb85e69-d03c-45de-8a9d-3f5209d4a90a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.149347] env[61974]: DEBUG oslo_vmware.api [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379031, 'name': PowerOnVM_Task, 'duration_secs': 0.60853} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.149722] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.150072] env[61974]: INFO nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Took 9.83 seconds to spawn the instance on the hypervisor. 
[ 927.150375] env[61974]: DEBUG nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 927.151832] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6362ccf7-13a1-4ae6-aad5-96b644f2594c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.168586] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525dbead-42c5-9a19-db3a-0e7b7e539dbd, 'name': SearchDatastore_Task, 'duration_secs': 0.053681} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.169081] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.169332] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.169566] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.169712] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.169886] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.170152] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e60f182b-6ade-4204-b53e-6ebbded59b90 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.177659] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.177902] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.178627] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f3b1bc-fc7f-464d-88b1-a72591a9e0aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.183953] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 927.183953] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52340806-547c-ae7c-6edf-38d9af2bc1b1" [ 927.183953] env[61974]: _type = "Task" [ 927.183953] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.191962] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52340806-547c-ae7c-6edf-38d9af2bc1b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.201992] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379027, 'name': Rename_Task, 'duration_secs': 1.152457} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.202257] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.202478] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98d280f0-f05f-4c35-9c40-f4d44c43cd67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.209255] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 927.209255] env[61974]: value = "task-1379033" [ 927.209255] env[61974]: _type = "Task" [ 927.209255] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.216399] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379033, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.337809] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 927.371519] env[61974]: DEBUG oslo_concurrency.lockutils [req-a96aaaf6-97fe-4e2e-9fd8-fce8b0e8d2d2 req-71c5adfb-3728-461a-a4ae-868d2be7aaa4 service nova] Releasing lock "refresh_cache-1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.409700] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566444} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.409961] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1c1404fd-a954-4849-883b-7898a7e87e2b/1c1404fd-a954-4849-883b-7898a7e87e2b.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.410200] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.410453] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92f199e8-855c-4291-834c-3a438d8ab771 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.419040] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 927.419040] env[61974]: value = "task-1379034" [ 927.419040] env[61974]: _type = "Task" [ 927.419040] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.429508] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379034, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.591498] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.595209] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f64881-6ecd-4f6b-ae2c-bc43089a350e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.603317] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af511f2-145b-4631-97f7-c5b526cb4480 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.607185] env[61974]: INFO nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] instance snapshotting [ 927.607395] env[61974]: WARNING nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 927.612467] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3ac68b-9fe6-404f-bc4a-86378938def6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.644116] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a0e24c-1239-466d-85b6-c662084763c3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.660605] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7645921-d316-49e0-814d-bc31fe13bba8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.666223] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5340eb8c-304b-4154-ac1f-daaa629bff5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.688116] env[61974]: DEBUG nova.compute.provider_tree [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.690636] env[61974]: INFO nova.compute.manager [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Took 28.87 seconds to build instance. [ 927.701978] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52340806-547c-ae7c-6edf-38d9af2bc1b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008217} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.702780] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58caa117-ee95-40b1-a0d9-d86432e59401 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.708274] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 927.708274] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520ec5a7-3829-aede-1fac-ada494adef37" [ 927.708274] env[61974]: _type = "Task" [ 927.708274] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.719917] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520ec5a7-3829-aede-1fac-ada494adef37, 'name': SearchDatastore_Task, 'duration_secs': 0.009097} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.723056] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.723335] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c/1aa2a63c-e352-4c9b-9445-9b45bf3ae14c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.723808] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.724034] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaa3ac65-e633-4d5b-8ec5-f8b53f22cc1e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.730176] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 927.730176] env[61974]: value = "task-1379035" [ 927.730176] env[61974]: _type = "Task" [ 927.730176] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.737611] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.929697] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379034, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068966} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.930026] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.934018] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dccd249-fde8-4358-b8bb-71d504bb87ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.961386] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 1c1404fd-a954-4849-883b-7898a7e87e2b/1c1404fd-a954-4849-883b-7898a7e87e2b.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.961386] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f3f65c6-4f03-4b9f-8c6e-7517d8637580 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.981217] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 927.981217] env[61974]: value = "task-1379036" [ 927.981217] env[61974]: _type = "Task" [ 927.981217] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.982211] env[61974]: DEBUG nova.network.neutron [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.992125] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379036, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.061973] env[61974]: DEBUG nova.compute.manager [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Received event network-vif-plugged-7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.062468] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.062700] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.062871] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.063120] env[61974]: DEBUG nova.compute.manager [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] No waiting events found dispatching network-vif-plugged-7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 928.063367] env[61974]: WARNING nova.compute.manager [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Received unexpected event network-vif-plugged-7f8230e7-7883-4de2-bf5f-ffa36751a171 for instance with vm_state building and task_state spawning. [ 928.063593] env[61974]: DEBUG nova.compute.manager [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Received event network-changed-7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.063801] env[61974]: DEBUG nova.compute.manager [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Refreshing instance network info cache due to event network-changed-7f8230e7-7883-4de2-bf5f-ffa36751a171. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 928.064072] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Acquiring lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.183840] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Creating Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 928.184425] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a7d3909-c4cf-4df1-b9f1-ac726043d1bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.193885] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 928.193885] env[61974]: value = "task-1379037" [ 928.193885] env[61974]: _type = "Task" [ 928.193885] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.201475] env[61974]: DEBUG nova.scheduler.client.report [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.204367] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0c1eec2f-e433-4ede-8077-087b1836eea6 tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.386s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.211304] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379037, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.222403] env[61974]: DEBUG oslo_vmware.api [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379033, 'name': PowerOnVM_Task, 'duration_secs': 0.766043} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.222674] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.222870] env[61974]: INFO nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Took 9.57 seconds to spawn the instance on the hypervisor. [ 928.223079] env[61974]: DEBUG nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 928.223905] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e966c6-57cc-4049-b17d-84e7aaa0f998 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.244923] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379035, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.349967] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 928.373244] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 928.373902] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.373902] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 928.373902] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 928.374111] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 928.374273] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 928.374534] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 928.374700] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 928.374893] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 
tempest-ServerShowV257Test-337983983-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 928.375053] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 928.375234] env[61974]: DEBUG nova.virt.hardware [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 928.376180] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef89de30-6898-4b6a-8ece-e95ec50f725d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.384183] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f9363a-717c-4b46-87bf-b9d23d87161f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.398243] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.403785] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Creating folder: Project (79e32c69f2fa4978b4353c149749349f). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.404102] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-707289b0-37f4-48a9-ad9e-186ff190c753 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.414718] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Created folder: Project (79e32c69f2fa4978b4353c149749349f) in parent group-v292912. [ 928.414930] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Creating folder: Instances. Parent ref: group-v292954. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.415190] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b0aafe0-d188-4e9a-b819-b4af6fda6638 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.423102] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Created folder: Instances in parent group-v292954. 
[ 928.423395] env[61974]: DEBUG oslo.service.loopingcall [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.423633] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.423870] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7c1c339-92ba-406b-a747-653b013bbe06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.440985] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.440985] env[61974]: value = "task-1379040" [ 928.440985] env[61974]: _type = "Task" [ 928.440985] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.448472] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379040, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.488064] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.488485] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Instance network_info: |[{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 928.488841] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Acquired lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.489082] env[61974]: DEBUG nova.network.neutron [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Refreshing network info cache for port 7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.490509] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:98:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f8230e7-7883-4de2-bf5f-ffa36751a171', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.499028] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Creating folder: Project (1a1a2f7a8ac448ca8d5e0306eefb1d97). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.503035] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d45a536-db2b-40a3-95ad-2b6bb526fb3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.510040] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.518274] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Created folder: Project (1a1a2f7a8ac448ca8d5e0306eefb1d97) in parent group-v292912. [ 928.518513] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Creating folder: Instances. Parent ref: group-v292957. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.518783] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0faf8404-fafb-4cac-8c72-8211db5e84b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.527913] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Created folder: Instances in parent group-v292957. [ 928.528574] env[61974]: DEBUG oslo.service.loopingcall [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.528574] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.528729] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6cd1c0a-acab-4b60-86de-afa4f870e28f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.546044] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.546379] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.546633] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.546862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.547122] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.549245] env[61974]: INFO nova.compute.manager [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Terminating instance [ 928.551678] env[61974]: DEBUG nova.compute.manager [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 928.551900] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.552777] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdd9d3e-426a-4176-bd83-78c5e99903ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.556614] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.556614] env[61974]: value = "task-1379043" [ 928.556614] env[61974]: _type = "Task" [ 928.556614] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.563222] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.563969] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2285bf42-34e6-4052-93d7-0b6d69a17b59 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.569177] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379043, 'name': CreateVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.573671] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 928.573671] env[61974]: value = "task-1379044" [ 928.573671] env[61974]: _type = "Task" [ 928.573671] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.584403] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379044, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.704540] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379037, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.706429] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.707121] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.710419] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.388s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.712605] env[61974]: INFO nova.compute.claims [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.714682] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 928.746595] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379035, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53941} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.750139] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c/1aa2a63c-e352-4c9b-9445-9b45bf3ae14c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.751406] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 928.752580] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6dc658ad-3630-492c-a9b7-0b4595002628 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.755447] env[61974]: INFO nova.compute.manager [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Took 28.03 seconds to build instance. [ 928.764186] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 928.764186] env[61974]: value = "task-1379045" [ 928.764186] env[61974]: _type = "Task" [ 928.764186] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.772224] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379045, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.885134] env[61974]: DEBUG nova.network.neutron [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updated VIF entry in instance network info cache for port 7f8230e7-7883-4de2-bf5f-ffa36751a171. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.885594] env[61974]: DEBUG nova.network.neutron [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.951293] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379040, 'name': CreateVM_Task, 'duration_secs': 0.377633} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.951673] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.951951] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.952134] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.952470] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.952722] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa34ff6f-c054-4e31-b6c7-bf326df78cd3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
928.957473] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 928.957473] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]527971f3-03d6-561d-e58b-3b305edaa524" [ 928.957473] env[61974]: _type = "Task" [ 928.957473] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.965602] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]527971f3-03d6-561d-e58b-3b305edaa524, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.993335] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379036, 'name': ReconfigVM_Task, 'duration_secs': 0.677392} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.993615] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 1c1404fd-a954-4849-883b-7898a7e87e2b/1c1404fd-a954-4849-883b-7898a7e87e2b.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.994245] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29345652-c7a0-4c1d-b625-aa21fe9edd7a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.000239] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 929.000239] env[61974]: value = "task-1379046" [ 929.000239] env[61974]: _type = "Task" [ 929.000239] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.008012] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379046, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.066333] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379043, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.082731] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379044, 'name': PowerOffVM_Task, 'duration_secs': 0.211342} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.083046] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.083222] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.083479] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9000cb65-efa8-47c5-849d-6c3f6091dc6f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.147488] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.147721] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.147895] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Deleting the datastore file [datastore1] f88f0ef2-24f2-4eef-92a3-8de2ebb6944a {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.148214] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-557ddf75-64b8-4ea8-9105-5d939c600891 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.156021] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for the task: (returnval){ [ 929.156021] env[61974]: value = "task-1379048" [ 929.156021] env[61974]: _type = "Task" [ 929.156021] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.165356] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379048, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.205259] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379037, 'name': CreateSnapshot_Task, 'duration_secs': 0.6208} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.205568] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Created Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 929.206367] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f708bb-a0a7-4798-a010-1f6327317909 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.220226] env[61974]: DEBUG nova.compute.utils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.223217] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 929.240460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.257682] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09759e3a-b8f8-4fe8-9bbc-10e88605e250 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.778s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.274746] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379045, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073779} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.275841] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 929.276654] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df43972a-81b9-4a38-ab51-db0d719d63a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.300204] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c/1aa2a63c-e352-4c9b-9445-9b45bf3ae14c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 929.301047] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd180c05-b703-4c6e-9732-af0441a7041f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.320751] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 929.320751] env[61974]: value = "task-1379049" [ 929.320751] env[61974]: _type = "Task" [ 929.320751] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.329091] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.389021] env[61974]: DEBUG oslo_concurrency.lockutils [req-a725a5ac-2601-4eff-9af0-1445543aff32 req-ec9701c9-7ec0-4058-93ba-ff4d808dadc4 service nova] Releasing lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.467778] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]527971f3-03d6-561d-e58b-3b305edaa524, 'name': SearchDatastore_Task, 'duration_secs': 0.014167} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.468129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.468391] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.468642] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.468802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.468989] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.469293] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dea941c4-2cf0-4211-a7f7-465b190a0a49 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.476858] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.477057] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.477765] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ecb6ffb-b0de-4a23-adbf-c5f60ee333df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.482831] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 929.482831] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52cbc6ff-bfb0-7ab0-99db-59668e951374" [ 929.482831] env[61974]: _type = "Task" [ 929.482831] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.490446] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cbc6ff-bfb0-7ab0-99db-59668e951374, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.508292] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379046, 'name': Rename_Task, 'duration_secs': 0.174749} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.508561] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.508814] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88f9a6b0-b9c7-4d13-a3d3-824250497397 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.514742] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 929.514742] env[61974]: value = "task-1379050" [ 929.514742] env[61974]: _type = "Task" [ 929.514742] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.524201] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.569249] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379043, 'name': CreateVM_Task, 'duration_secs': 0.532146} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.569425] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.570152] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.570289] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.570625] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.570882] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d834e9-c8c0-43ef-9120-1d139febf063 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.575599] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 929.575599] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52900513-32c7-7af2-c9e7-4a14c01f7918" [ 929.575599] env[61974]: _type = "Task" [ 929.575599] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.583826] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52900513-32c7-7af2-c9e7-4a14c01f7918, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.666386] env[61974]: DEBUG oslo_vmware.api [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Task: {'id': task-1379048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131897} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.666619] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.666844] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.667079] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.667283] env[61974]: INFO nova.compute.manager [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 929.667537] env[61974]: DEBUG oslo.service.loopingcall [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.667772] env[61974]: DEBUG nova.compute.manager [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 929.667875] env[61974]: DEBUG nova.network.neutron [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 929.724680] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Creating linked-clone VM from snapshot {{(pid=61974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 929.727971] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-58e06c46-b5ca-47b1-9480-0e3d40a75181 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.731445] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 929.739111] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 929.739111] env[61974]: value = "task-1379051" [ 929.739111] env[61974]: _type = "Task" [ 929.739111] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.747338] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379051, 'name': CloneVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.763852] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.768978] env[61974]: INFO nova.compute.manager [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Rebuilding instance [ 929.820255] env[61974]: DEBUG nova.compute.manager [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 929.821152] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f20aa1-6798-49bc-90fd-d525d0a75a43 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.839147] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379049, 'name': ReconfigVM_Task, 'duration_secs': 0.271991} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.842292] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c/1aa2a63c-e352-4c9b-9445-9b45bf3ae14c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.843516] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84394d93-d46d-429c-8301-f8b3a05248f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.849929] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 929.849929] env[61974]: value = "task-1379052" [ 929.849929] env[61974]: _type = "Task" [ 929.849929] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.858762] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379052, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.996532] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cbc6ff-bfb0-7ab0-99db-59668e951374, 'name': SearchDatastore_Task, 'duration_secs': 0.008231} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.997489] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b183d8-5087-401f-9fab-763fbb265417 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.004048] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 930.004048] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520b2b31-edf8-3dc8-1ff7-fc071aeb54d2" [ 930.004048] env[61974]: _type = "Task" [ 930.004048] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.015165] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b2b31-edf8-3dc8-1ff7-fc071aeb54d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.023746] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379050, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.051208] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c82fedd-e52f-4ce4-a7f9-8fc50fb08af9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.059103] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d1d75f-dab0-47de-8d3a-383bcaeb58e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.100060] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6d25ad-9521-45df-a22f-f01293c3fa56 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.111945] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109384f8-35de-41ab-890e-a8474ac46755 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.115695] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52900513-32c7-7af2-c9e7-4a14c01f7918, 'name': SearchDatastore_Task, 'duration_secs': 0.00881} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.117075] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.117324] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.117562] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.119032] env[61974]: DEBUG nova.compute.manager [req-8291a5ee-dd17-4271-9cd4-a5a6a956375d req-fac3bcc9-2607-4f6f-8083-3c30238f3ae1 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Received event network-vif-deleted-6198b979-646d-4f9b-bcd2-3dbcab269efd {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.119114] env[61974]: INFO nova.compute.manager [req-8291a5ee-dd17-4271-9cd4-a5a6a956375d req-fac3bcc9-2607-4f6f-8083-3c30238f3ae1 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Neutron deleted interface 6198b979-646d-4f9b-bcd2-3dbcab269efd; detaching it from the instance and deleting it from the info cache [ 930.119354] env[61974]: DEBUG nova.network.neutron [req-8291a5ee-dd17-4271-9cd4-a5a6a956375d req-fac3bcc9-2607-4f6f-8083-3c30238f3ae1 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.136436] env[61974]: DEBUG nova.compute.provider_tree [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.249144] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379051, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.285838] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.341883] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.342205] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e62185e1-cf08-4073-86eb-7ab3376a0fed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.349903] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 930.349903] env[61974]: value = "task-1379053" [ 930.349903] env[61974]: _type = "Task" [ 930.349903] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.362731] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.365694] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379052, 'name': Rename_Task, 'duration_secs': 0.232316} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.365948] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.366246] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28443af4-86b5-43b2-8216-1b023c7d6bd3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.371715] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 930.371715] env[61974]: value = "task-1379054" [ 930.371715] env[61974]: _type = "Task" [ 930.371715] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.379542] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.440713] env[61974]: DEBUG nova.network.neutron [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.515893] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520b2b31-edf8-3dc8-1ff7-fc071aeb54d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011574} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.520047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.520215] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.520868] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.521082] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.521589] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec388e09-0f9a-4d84-b53d-596c1ea5ad73 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.524308] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee3bb266-d11b-4b22-b482-1eadb9c5cdfc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.529672] env[61974]: DEBUG oslo_vmware.api [None req-4e018390-542e-401e-aaec-13a325de0db9 
tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379050, 'name': PowerOnVM_Task, 'duration_secs': 0.575708} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.530273] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.530535] env[61974]: INFO nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Took 9.44 seconds to spawn the instance on the hypervisor. [ 930.531249] env[61974]: DEBUG nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 930.531606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e686ce-a9da-4ce7-bb11-81d81f50a6de {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.535309] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 930.535309] env[61974]: value = "task-1379055" [ 930.535309] env[61974]: _type = "Task" [ 930.535309] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.536923] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.538716] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.543080] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-114c690a-a875-4843-a50d-1f9a0c33d11a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.554159] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379055, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.555505] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 930.555505] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5267a00d-3503-9a6a-e1b9-56233e6afb27" [ 930.555505] env[61974]: _type = "Task" [ 930.555505] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.563728] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5267a00d-3503-9a6a-e1b9-56233e6afb27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.637618] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c686954e-50ac-40c4-b1e2-30c1bd6bb55a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.640871] env[61974]: DEBUG nova.scheduler.client.report [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.651494] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f5723e-3ed1-4471-a952-aec25bba4bb9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.679416] env[61974]: DEBUG nova.compute.manager [req-8291a5ee-dd17-4271-9cd4-a5a6a956375d req-fac3bcc9-2607-4f6f-8083-3c30238f3ae1 service nova] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Detach interface failed, port_id=6198b979-646d-4f9b-bcd2-3dbcab269efd, reason: Instance f88f0ef2-24f2-4eef-92a3-8de2ebb6944a could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 930.741116] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 930.753658] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379051, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.767481] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.767751] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.767906] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.768106] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.768288] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.768472] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.768695] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.768864] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.769220] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 
tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.769465] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.769768] env[61974]: DEBUG nova.virt.hardware [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.770763] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09531623-03da-40b8-b548-d9b443895f3d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.780258] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ea36cd-478d-4562-b467-a3411121583d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.794857] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.800806] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Creating folder: Project (eb0da45fc37149c9b596e2dcb2f0c610). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.801195] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e515e58d-fe34-4d7e-8631-ea1a542f7d24 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.812689] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Created folder: Project (eb0da45fc37149c9b596e2dcb2f0c610) in parent group-v292912. [ 930.812893] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Creating folder: Instances. Parent ref: group-v292962. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.813177] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ac99f90-24cb-470c-85f0-9bf747759d83 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.824971] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Created folder: Instances in parent group-v292962. 
[ 930.825261] env[61974]: DEBUG oslo.service.loopingcall [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.825471] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.825692] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53bfc118-ccb0-4d70-ba66-4e4280c7a063 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.845255] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.845255] env[61974]: value = "task-1379058" [ 930.845255] env[61974]: _type = "Task" [ 930.845255] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.856601] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379058, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.862314] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379053, 'name': PowerOffVM_Task, 'duration_secs': 0.220121} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.862314] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.862483] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.863234] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3eca18b-a9f3-421d-a4d4-f7d7b7fcb361 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.874067] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.878311] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1781c8fa-6ed7-434b-bdc4-295177946078 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.885472] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379054, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.943911] env[61974]: INFO nova.compute.manager [-] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Took 1.28 seconds to deallocate network for instance. [ 930.950958] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.951310] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.951610] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleting the datastore file [datastore1] f0601d26-4e29-4946-bb52-50e2a2163535 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.952321] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb34d857-6460-401a-b90a-283fab76094b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.960993] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 930.960993] env[61974]: value = "task-1379060" [ 930.960993] env[61974]: _type = "Task" [ 930.960993] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.972662] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.048809] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379055, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48919} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.049477] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.049718] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.049983] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dfafd8da-b3a6-43cf-b393-207da81f9220 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.063876] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 931.063876] env[61974]: value = "task-1379061" [ 931.063876] env[61974]: _type = "Task" [ 931.063876] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.064385] env[61974]: INFO nova.compute.manager [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Took 28.50 seconds to build instance. [ 931.073131] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5267a00d-3503-9a6a-e1b9-56233e6afb27, 'name': SearchDatastore_Task, 'duration_secs': 0.016889} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.075054] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b72873-4c89-47c9-a4f1-1ca89cccf593 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.080907] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.084404] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 931.084404] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52ced5f4-bfbf-3bce-3d63-315ddf57afd7" [ 931.084404] env[61974]: _type = "Task" [ 931.084404] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.094328] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ced5f4-bfbf-3bce-3d63-315ddf57afd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.145775] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.146307] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 931.148942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.366s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.150383] env[61974]: INFO nova.compute.claims [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.252721] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379051, 'name': CloneVM_Task, 'duration_secs': 1.491519} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.252994] env[61974]: INFO nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Created linked-clone VM from snapshot [ 931.253734] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a135f85-9379-4216-9dc5-ce58746e10e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.260840] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Uploading image 7b64f88a-41ea-4e46-a992-5bf41cb091d5 {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 931.282684] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 931.282684] env[61974]: value = "vm-292961" [ 931.282684] env[61974]: _type = "VirtualMachine" [ 931.282684] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 931.283020] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-959dd57e-2225-486d-823e-934a69ff95b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.290120] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease: (returnval){ [ 931.290120] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eb9ba7-0932-6caf-fb86-f867dcf3e4f4" [ 931.290120] env[61974]: _type = "HttpNfcLease" [ 931.290120] env[61974]: } obtained for exporting VM: (result){ [ 931.290120] env[61974]: value = "vm-292961" [ 931.290120] env[61974]: _type = "VirtualMachine" [ 931.290120] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 931.290418] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the lease: (returnval){ [ 931.290418] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eb9ba7-0932-6caf-fb86-f867dcf3e4f4" [ 931.290418] env[61974]: _type = "HttpNfcLease" [ 931.290418] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 931.297083] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.297083] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eb9ba7-0932-6caf-fb86-f867dcf3e4f4" [ 931.297083] env[61974]: _type = "HttpNfcLease" [ 931.297083] env[61974]: } is initializing. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 931.354720] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379058, 'name': CreateVM_Task, 'duration_secs': 0.354533} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.354900] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.355343] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.355502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.355862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.356078] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a330c1-2a1f-48f8-b3be-eb37a32c7228 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.360551] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 931.360551] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e25f67-38b6-bfd8-3e21-931b975dde98" [ 931.360551] env[61974]: _type = "Task" [ 931.360551] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.367547] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e25f67-38b6-bfd8-3e21-931b975dde98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.381150] env[61974]: DEBUG oslo_vmware.api [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379054, 'name': PowerOnVM_Task, 'duration_secs': 0.519844} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.381395] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.381673] env[61974]: INFO nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Took 7.86 seconds to spawn the instance on the hypervisor. [ 931.381879] env[61974]: DEBUG nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 931.382671] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7408c6-9114-4fe2-ae02-69f1c7b39256 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.453607] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.471416] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170432} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.471674] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.471852] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.472036] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.574858] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4e018390-542e-401e-aaec-13a325de0db9 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.562s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.575203] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073706} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.576325] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.577255] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0784ab8b-c3d8-4e34-8eec-bbbe640cdc73 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.599937] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.604249] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3c41498-00f8-4e4d-83d7-fca8d9fd7839 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.625629] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ced5f4-bfbf-3bce-3d63-315ddf57afd7, 'name': SearchDatastore_Task, 'duration_secs': 0.009551} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.627397] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.627720] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] b1fa5433-8f26-48db-a19d-d1e11245fb44/b1fa5433-8f26-48db-a19d-d1e11245fb44.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.628159] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 931.628159] env[61974]: value = "task-1379063" [ 931.628159] env[61974]: _type = "Task" [ 931.628159] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.628506] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90a81c0d-b909-49f0-b8b8-5dc32899b231 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.640928] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.642493] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 931.642493] env[61974]: value = "task-1379064" [ 931.642493] env[61974]: _type = "Task" [ 931.642493] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.650814] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.654695] env[61974]: DEBUG nova.compute.utils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 931.660346] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 931.660582] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.734098] env[61974]: DEBUG nova.policy [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ea7f37c2b55463b9d1d084e73dbf5c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a117fc7495e1478b83f0a543effe8e06', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 931.799202] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.799202] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eb9ba7-0932-6caf-fb86-f867dcf3e4f4" [ 931.799202] env[61974]: _type = "HttpNfcLease" [ 931.799202] env[61974]: } is ready. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 931.799608] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 931.799608] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eb9ba7-0932-6caf-fb86-f867dcf3e4f4" [ 931.799608] env[61974]: _type = "HttpNfcLease" [ 931.799608] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 931.800271] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07a3a6-8253-40bd-9896-15e4d1b0330b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.808984] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk from lease info. {{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 931.809273] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk for reading. 
{{(pid=61974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 931.883754] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e25f67-38b6-bfd8-3e21-931b975dde98, 'name': SearchDatastore_Task, 'duration_secs': 0.008613} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.884168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.884428] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.884711] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.884873] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.885156] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.885533] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9162a520-8c06-4008-b312-b9bfa6a97b14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.903065] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.903433] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.904869] env[61974]: INFO nova.compute.manager [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Took 25.08 seconds to build instance. [ 931.906155] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7122e45a-20a0-4d93-8d04-6b24f0c29972 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.912982] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 931.912982] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520161a1-7b35-3699-e4ff-2b019a353691" [ 931.912982] env[61974]: _type = "Task" [ 931.912982] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.923787] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520161a1-7b35-3699-e4ff-2b019a353691, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.938025] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-572a6ae4-da6a-4a8a-9f72-e50b49b5134a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.077340] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 932.141211] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.155974] env[61974]: DEBUG nova.compute.manager [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Received event network-changed-f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.156223] env[61974]: DEBUG nova.compute.manager [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Refreshing instance network info cache due to event network-changed-f76d592c-5eee-4379-b971-9896eb2bb538. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.156475] env[61974]: DEBUG oslo_concurrency.lockutils [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] Acquiring lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.156656] env[61974]: DEBUG oslo_concurrency.lockutils [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] Acquired lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.156848] env[61974]: DEBUG nova.network.neutron [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Refreshing network info cache for port f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.162092] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480922} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.162623] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] b1fa5433-8f26-48db-a19d-d1e11245fb44/b1fa5433-8f26-48db-a19d-d1e11245fb44.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.162914] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.164282] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 932.170730] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f00ad6a7-2b06-464c-aad0-513410cbfd80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.183442] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 932.183442] env[61974]: value = "task-1379065" [ 932.183442] env[61974]: _type = "Task" [ 932.183442] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.193196] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379065, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.215868] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Successfully created port: 27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.410580] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a4850941-58df-407d-9c1c-fc6011303051 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.401s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.426619] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520161a1-7b35-3699-e4ff-2b019a353691, 'name': SearchDatastore_Task, 'duration_secs': 0.057191} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.427563] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-435ab66f-0bd8-4f4b-af0e-a26ee738ced3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.436028] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 932.436028] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52385789-b593-f2f6-eae7-e83fd16b098a" [ 932.436028] env[61974]: _type = "Task" [ 932.436028] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.455153] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52385789-b593-f2f6-eae7-e83fd16b098a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.520099] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.520628] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.520628] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.520838] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.522650] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.522650] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.522650] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.522650] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.522650] env[61974]: 
DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.522864] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.522864] env[61974]: DEBUG nova.virt.hardware [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.523829] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9467e17-9056-4197-a482-4eda6ac187af {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.532798] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f868df4-0dca-4ee8-ad79-04bddfd4a482 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.549481] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:c8:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31fec0d1-b5b6-4c0c-ba81-11fad03cfa19', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.558716] env[61974]: DEBUG oslo.service.loopingcall [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.562787] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.562787] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2e7ec55-1ea7-402a-bc6c-f6225cc31207 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.585921] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.585921] env[61974]: value = "task-1379066" [ 932.585921] env[61974]: _type = "Task" [ 932.585921] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.597388] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379066, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.599368] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8469858-46ae-42c5-b0bf-aefaee1b8d3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.607122] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.608281] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a1690a-cde4-485e-b54b-b7b8585493b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.649857] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2dce2e-6f9e-4a1e-94bc-72f986c3d9d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.660190] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379063, 'name': ReconfigVM_Task, 'duration_secs': 0.668759} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.663240] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.666518] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c7e38a9-61ff-4379-ae8d-1e9010c529a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.669767] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9765d95-4328-4883-b53e-c4b6c1ad798d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.691356] env[61974]: DEBUG nova.compute.provider_tree [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.693210] env[61974]: DEBUG oslo_vmware.api [None 
req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 932.693210] env[61974]: value = "task-1379067" [ 932.693210] env[61974]: _type = "Task" [ 932.693210] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.707406] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083244} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.711839] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.712653] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379067, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.713564] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68191891-910f-484d-b2ba-ad45a34f92e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.738168] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] b1fa5433-8f26-48db-a19d-d1e11245fb44/b1fa5433-8f26-48db-a19d-d1e11245fb44.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.739240] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8afb2cbe-b3af-4469-889c-ba26b538622c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.759742] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 932.759742] env[61974]: value = "task-1379068" [ 932.759742] env[61974]: _type = "Task" [ 932.759742] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.768484] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.897690] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Successfully created port: ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.915161] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 932.947177] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52385789-b593-f2f6-eae7-e83fd16b098a, 'name': SearchDatastore_Task, 'duration_secs': 0.021469} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.947586] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.947867] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 0ce75511-290c-4fea-9657-dfdd8d9efc4b/0ce75511-290c-4fea-9657-dfdd8d9efc4b.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.948253] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1a03616-1223-4d41-9d3f-0a673de06a62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.954827] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 932.954827] env[61974]: value = "task-1379069" [ 932.954827] env[61974]: _type = "Task" [ 932.954827] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.964045] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.100923] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379066, 'name': CreateVM_Task, 'duration_secs': 0.396166} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.100923] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.100923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.100923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.100923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 933.103239] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dc01274-37ea-47a6-aeb3-bcf063c86d76 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.111022] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 933.111022] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523fcb2d-e98e-b5c1-844c-cb1c12c0430f" [ 933.111022] env[61974]: _type = "Task" [ 933.111022] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.115772] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523fcb2d-e98e-b5c1-844c-cb1c12c0430f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.195684] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 933.209309] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379067, 'name': Rename_Task, 'duration_secs': 0.200224} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.209676] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.209864] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bf08d89-8a25-4899-a280-aa4cddede86d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.215603] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 933.215603] env[61974]: value = "task-1379070" [ 933.215603] env[61974]: _type = "Task" [ 933.215603] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.218006] env[61974]: ERROR nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [req-38434026-c205-43e0-a5e4-ad879d704b7a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 81f72dd1-35ef-4b87-b120-a6ea5ab8608a. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-38434026-c205-43e0-a5e4-ad879d704b7a"}]} [ 933.225180] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 933.225424] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.225585] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 933.225770] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.225918] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 933.226094] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 933.226310] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 933.226477] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 933.226671] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 933.226800] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 933.226978] env[61974]: DEBUG nova.virt.hardware [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 933.227764] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39826b89-3a6a-4371-b252-655ae4a74a17 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.236962] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379070, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.240513] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b9001c-2720-453b-8969-41ad5c4af729 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.245473] env[61974]: DEBUG nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Refreshing inventories for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 933.261995] env[61974]: DEBUG nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating ProviderTree inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 933.262288] env[61974]: DEBUG nova.compute.provider_tree [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.273363] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379068, 'name': ReconfigVM_Task, 'duration_secs': 0.321014} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.273696] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfigured VM instance instance-00000045 to attach disk [datastore2] b1fa5433-8f26-48db-a19d-d1e11245fb44/b1fa5433-8f26-48db-a19d-d1e11245fb44.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.274319] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca5fb00d-659b-4b20-a9c5-2cd25c759ef8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.280361] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 933.280361] env[61974]: value = "task-1379071" [ 933.280361] env[61974]: _type = "Task" [ 933.280361] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.284698] env[61974]: DEBUG nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Refreshing aggregate associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, aggregates: None {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 933.292033] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379071, 'name': Rename_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.292975] env[61974]: DEBUG nova.network.neutron [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updated VIF entry in instance network info cache for port f76d592c-5eee-4379-b971-9896eb2bb538. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.293353] env[61974]: DEBUG nova.network.neutron [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating instance_info_cache with network_info: [{"id": "f76d592c-5eee-4379-b971-9896eb2bb538", "address": "fa:16:3e:d0:c9:51", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76d592c-5e", "ovs_interfaceid": "f76d592c-5eee-4379-b971-9896eb2bb538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.317590] env[61974]: DEBUG nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Refreshing trait associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 933.438310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.465852] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379069, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.585833] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.585833] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.622031] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523fcb2d-e98e-b5c1-844c-cb1c12c0430f, 'name': SearchDatastore_Task, 'duration_secs': 0.02244} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.622441] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.622684] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.623292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.623292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.623531] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.627583] env[61974]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-482f1516-f0af-4f7f-bbb3-931f0091fa7b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.636268] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.636482] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.637306] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95656336-2517-4228-a54a-b2adeea84aed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.646028] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 933.646028] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528b7d86-c765-f9ae-fc20-4de5c60a041a" [ 933.646028] env[61974]: _type = "Task" [ 933.646028] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.654360] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528b7d86-c765-f9ae-fc20-4de5c60a041a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.660760] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffecfd20-936e-4e20-a0f3-7cf128b3026a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.668311] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9ae045-60a2-4f5a-ab1e-8a5b5859111b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.698370] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61fb7e0-cebf-49eb-972a-29df12f424f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.706109] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56919d70-b0d2-46d2-9208-e647f7fcb8fe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.721068] env[61974]: DEBUG nova.compute.provider_tree [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.732933] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379070, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.790040] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379071, 'name': Rename_Task, 'duration_secs': 0.26339} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.790424] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.791229] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4a6b64e-9da5-4d29-a593-a6903d8a6358 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.796300] env[61974]: DEBUG oslo_concurrency.lockutils [req-ca0714dd-1961-41c5-a61a-2b8ebd1aa865 req-ebc62640-86f9-4ae5-a741-00ecec4cb924 service nova] Releasing lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.797800] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 933.797800] env[61974]: value = "task-1379072" [ 933.797800] env[61974]: _type = "Task" [ 933.797800] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.807422] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.967563] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656361} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.967836] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 0ce75511-290c-4fea-9657-dfdd8d9efc4b/0ce75511-290c-4fea-9657-dfdd8d9efc4b.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.968076] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.968383] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8b07d2c-ef44-4c2b-80e3-917f607d0226 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.974674] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 933.974674] env[61974]: value = "task-1379073" [ 933.974674] env[61974]: _type = "Task" [ 933.974674] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.983217] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.088455] env[61974]: DEBUG nova.compute.utils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.156551] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528b7d86-c765-f9ae-fc20-4de5c60a041a, 'name': SearchDatastore_Task, 'duration_secs': 0.010194} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.157436] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b9b57b4-03d1-4e66-89a3-7d53266fc491 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.162964] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 934.162964] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520f647c-4c5a-b348-1fe2-1efae104ab98" [ 934.162964] env[61974]: _type = "Task" [ 934.162964] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.171216] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520f647c-4c5a-b348-1fe2-1efae104ab98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.232285] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379070, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.259681] env[61974]: DEBUG nova.scheduler.client.report [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 934.260029] env[61974]: DEBUG nova.compute.provider_tree [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 92 to 93 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 934.260209] env[61974]: DEBUG nova.compute.provider_tree [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.307058] env[61974]: DEBUG oslo_vmware.api [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379072, 'name': PowerOnVM_Task, 'duration_secs': 0.504833} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.307331] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.307553] env[61974]: INFO nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Took 8.36 seconds to spawn the instance on the hypervisor. [ 934.307734] env[61974]: DEBUG nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 934.308533] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f4477d-343b-4822-bf13-5c5c18068848 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.484726] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072873} completed successfully. 
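Note on the ProviderTree/Placement entries above: the inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a is reported with the same figures while the provider generation moves from 92 to 93. Placement derives the total schedulable amount of each resource class from these fields as (total - reserved) * allocation_ratio, with max_unit capping what any single instance may consume. The following is a minimal sketch of that arithmetic using the exact values from the log; it is not Placement's own code, and the helper name "schedulable" is illustrative only.

    # Inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a as logged above
    # (min_unit and step_size omitted; they are 1 throughout).
    INVENTORY = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 177,   'allocation_ratio': 1.0},
    }

    def schedulable(inventory):
        """Amount of each resource class the scheduler may allocate in total."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inventory.items()}

    print(schedulable(INVENTORY))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So with a 4.0 VCPU allocation ratio this node exposes 192 schedulable vCPUs, while max_unit=16 still limits any one flavor to 16 vCPUs.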
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.485039] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.485797] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6a174d-20cc-4a3d-8502-d7bcd8dad3a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.505312] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 0ce75511-290c-4fea-9657-dfdd8d9efc4b/0ce75511-290c-4fea-9657-dfdd8d9efc4b.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.505653] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0744cd7-3731-463d-b912-e0f706d0e1b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.525636] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 934.525636] env[61974]: value = "task-1379074" [ 934.525636] env[61974]: _type = "Task" [ 934.525636] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.535645] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.593839] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.675249] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520f647c-4c5a-b348-1fe2-1efae104ab98, 'name': SearchDatastore_Task, 'duration_secs': 0.010407} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.675510] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.675782] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.676068] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12fce907-f289-4f60-b7f8-b0d0cc79f116 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.682632] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 934.682632] env[61974]: value = "task-1379075" [ 934.682632] env[61974]: _type = "Task" [ 934.682632] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.691820] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379075, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.725983] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Successfully updated port: 27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.731437] env[61974]: DEBUG nova.compute.manager [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-vif-plugged-27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.731817] env[61974]: DEBUG oslo_concurrency.lockutils [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] Acquiring lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.731817] env[61974]: DEBUG oslo_concurrency.lockutils [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.731994] env[61974]: DEBUG oslo_concurrency.lockutils [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.732187] env[61974]: DEBUG nova.compute.manager [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] No waiting events found dispatching network-vif-plugged-27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 934.732373] env[61974]: WARNING nova.compute.manager [req-a7c3d934-5eb2-48b6-85cf-59e012aa9b5d req-8ad5bc74-4ee5-4149-ba63-7bd21d7dd607 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received unexpected event network-vif-plugged-27cf697b-5e9a-4214-907e-4bd03824c8fa for instance with vm_state building and task_state spawning. [ 934.736312] env[61974]: DEBUG oslo_vmware.api [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379070, 'name': PowerOnVM_Task, 'duration_secs': 1.239835} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.736593] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.736820] env[61974]: INFO nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Took 6.39 seconds to spawn the instance on the hypervisor. [ 934.737010] env[61974]: DEBUG nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 934.737832] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6a42fe-962b-499d-9c11-76fbf96ffd6b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.767358] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.617s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.767358] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 934.770229] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.891s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.772251] env[61974]: INFO nova.compute.claims [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.824403] env[61974]: INFO nova.compute.manager [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Took 26.21 seconds to build instance. [ 935.040809] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379074, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.197085] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379075, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.256465] env[61974]: INFO nova.compute.manager [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Took 24.78 seconds to build instance. [ 935.277388] env[61974]: DEBUG nova.compute.utils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 935.283326] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Not allocating networking since 'none' was specified. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 935.326811] env[61974]: DEBUG oslo_concurrency.lockutils [None req-365b183c-44bf-465f-b8ed-05f808de7df7 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.081s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.352910] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.353148] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.538882] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379074, 'name': ReconfigVM_Task, 'duration_secs': 0.537388} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.541102] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 0ce75511-290c-4fea-9657-dfdd8d9efc4b/0ce75511-290c-4fea-9657-dfdd8d9efc4b.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.541102] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57d1e552-4c19-4856-b007-95c511737f97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.546042] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 935.546042] env[61974]: value = "task-1379076" [ 935.546042] env[61974]: _type = "Task" [ 935.546042] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.554900] env[61974]: INFO nova.compute.manager [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Rebuilding instance [ 935.556799] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379076, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.607934] env[61974]: DEBUG nova.compute.manager [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 935.609251] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f9e7ac-6a1a-4456-aabb-231a31fda4f2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.670630] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.670988] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.671263] env[61974]: INFO nova.compute.manager [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Attaching volume 004ebd4b-70b2-40ab-8253-dc095c5312e1 to /dev/sdb [ 935.694608] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566312} completed successfully. 
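Note on the oslo_concurrency.lockutils entries above: every named lock (the instance UUID lock for do_reserve and do_attach_volume, "compute_resources" for the resource tracker, the refresh_cache-* locks) is logged with how long the caller waited to acquire it and how long it was held, e.g. reserve_block_device_name held for 1.007s and instance_claim for 3.617s. The sketch below reproduces that waited/held accounting with a plain threading.Lock; it only illustrates the pattern and is not oslo.concurrency's implementation (timed_lock and the registry are hypothetical names).

    import contextlib
    import threading
    import time

    _LOCKS = {}                       # name -> threading.Lock, created on first use
    _REGISTRY_GUARD = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, target):
        """Acquire a named lock, reporting waited/held times like the entries above."""
        with _REGISTRY_GUARD:
            lock = _LOCKS.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, target, waited))
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, target, held))

    # Usage mirroring the reserve step for instance 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c:
    with timed_lock("1aa2a63c-e352-4c9b-9445-9b45bf3ae14c", "do_reserve"):
        time.sleep(0.01)              # stand-in for reserving the block device name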
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.694858] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.695091] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.695386] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9a4395a-92dc-483b-87d3-46bc34f9e9b6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.702366] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 935.702366] env[61974]: value = "task-1379077" [ 935.702366] env[61974]: _type = "Task" [ 935.702366] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.714238] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379077, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.718035] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69cde58-24c4-4900-a15e-89951dae58e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.723479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160567b6-d292-4bad-a62c-2259cef13ae7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.737411] env[61974]: DEBUG nova.virt.block_device [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Updating existing volume attachment record: 83a1cdf5-a4bf-42e8-ab95-6866d6b8904a {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 935.759875] env[61974]: DEBUG oslo_concurrency.lockutils [None req-aef05351-2c47-46d6-9e4f-a1e16b989784 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.918s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.784739] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 935.829416] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 935.859237] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 935.859454] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 936.057337] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379076, 'name': Rename_Task, 'duration_secs': 0.212266} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.057635] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.057919] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-469a605b-c665-4c56-be52-54d556ebe0b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.065011] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 936.065011] env[61974]: value = "task-1379081" [ 936.065011] env[61974]: _type = "Task" [ 936.065011] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.072920] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.103377] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e641f5d-33ff-4efb-8543-614953640a20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.110974] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d56662-bc56-4db9-9a10-8b43478e2a95 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.145141] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.145379] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5b1722c-2a91-4161-9341-33afa9ca461f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.148385] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b23987-af0f-424e-8ef6-84e3c847e794 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.156320] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691c8d94-8fa6-4e90-9e7f-0965bdae0137 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.161430] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 936.161430] env[61974]: value = "task-1379082" [ 936.161430] env[61974]: _type = "Task" [ 936.161430] 
env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.172618] env[61974]: DEBUG nova.compute.provider_tree [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.178791] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.213071] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113969} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.213380] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.214557] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33da509d-79b5-46d4-8543-99eda587f740 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.236868] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.237216] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc06f51e-b81c-4e47-8fa7-8e655c8a12c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.256824] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 936.256824] env[61974]: value = "task-1379083" [ 936.256824] env[61974]: _type = "Task" [ 936.256824] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.261654] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 936.269041] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.358952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.368041] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Didn't find any instances for network info cache update. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 936.368353] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.368545] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.368687] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.368854] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.369019] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.369200] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.369361] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 936.369490] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.439100] env[61974]: DEBUG nova.compute.manager [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Received event network-changed-7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.439324] env[61974]: DEBUG nova.compute.manager [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Refreshing instance network info cache due to event network-changed-7f8230e7-7883-4de2-bf5f-ffa36751a171. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 936.439551] env[61974]: DEBUG oslo_concurrency.lockutils [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] Acquiring lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.439761] env[61974]: DEBUG oslo_concurrency.lockutils [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] Acquired lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.439859] env[61974]: DEBUG nova.network.neutron [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Refreshing network info cache for port 7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.577509] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379081, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.673819] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.676689] env[61974]: DEBUG nova.scheduler.client.report [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.767037] env[61974]: DEBUG nova.compute.manager [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-changed-27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.768024] env[61974]: DEBUG nova.compute.manager [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Refreshing instance network info cache due to event network-changed-27cf697b-5e9a-4214-907e-4bd03824c8fa. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 936.768024] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Acquiring lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.768024] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Acquired lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.768237] env[61974]: DEBUG nova.network.neutron [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Refreshing network info cache for port 27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.778829] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379083, 'name': ReconfigVM_Task, 'duration_secs': 0.364732} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.782677] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Reconfigured VM instance instance-00000042 to attach disk [datastore2] f0601d26-4e29-4946-bb52-50e2a2163535/f0601d26-4e29-4946-bb52-50e2a2163535.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.784676] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58ea8bae-adc5-4a93-b797-768b20c7bff3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.793091] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 936.793091] env[61974]: value = "task-1379084" [ 936.793091] env[61974]: _type = "Task" [ 936.793091] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.800057] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 936.803830] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.813645] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379084, 'name': Rename_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.815369] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Successfully updated port: ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.830511] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.830927] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.831207] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.831541] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.831783] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.832059] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.832406] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.832681] env[61974]: DEBUG nova.virt.hardware 
[None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 936.832957] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.833257] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.833582] env[61974]: DEBUG nova.virt.hardware [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.834918] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181d2c0a-788a-4182-b57b-2046d866f27c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.848480] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3b547e-496b-4337-bb05-0473cc9683c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.871729] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.883052] env[61974]: DEBUG oslo.service.loopingcall [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.884042] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.884514] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.885097] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7484a98-a87d-4413-952a-70a6693635e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.915020] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.915020] env[61974]: value = "task-1379085" [ 936.915020] env[61974]: _type = "Task" [ 936.915020] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.925389] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379085, 'name': CreateVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.076849] env[61974]: DEBUG oslo_vmware.api [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379081, 'name': PowerOnVM_Task, 'duration_secs': 0.680606} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.077228] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.077533] env[61974]: INFO nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Took 6.34 seconds to spawn the instance on the hypervisor. [ 937.077853] env[61974]: DEBUG nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 937.078803] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021a0261-40e3-4647-a763-819649846371 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.182811] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379082, 'name': PowerOffVM_Task, 'duration_secs': 0.808701} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.183286] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.183612] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.184836] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa24b826-8c33-4c92-90c8-f9f7aa26e2d7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.189653] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.190390] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 937.198037] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.951s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.198037] env[61974]: DEBUG nova.objects.instance [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lazy-loading 'resources' on Instance uuid a9edbd98-3e67-476b-934d-15d893a62d02 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.201911] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.202238] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e5e898c-c679-40d7-af8d-60fd8c8ef881 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.228088] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.228575] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.228896] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Deleting the datastore file [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.229907] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad991094-a6dc-48af-bd8a-0c02e6e26c2e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.238943] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 937.238943] env[61974]: value = "task-1379087" [ 937.238943] env[61974]: _type = "Task" [ 937.238943] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.248200] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.255350] env[61974]: DEBUG nova.network.neutron [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updated VIF entry in instance network info cache for port 7f8230e7-7883-4de2-bf5f-ffa36751a171. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.255833] env[61974]: DEBUG nova.network.neutron [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.304879] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379084, 'name': Rename_Task, 'duration_secs': 0.171294} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.304879] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.305139] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20d11d74-26c8-4bb9-bea1-e8b6924b3d5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.311285] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 937.311285] env[61974]: value = "task-1379088" [ 937.311285] env[61974]: _type = "Task" [ 937.311285] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.315509] env[61974]: DEBUG nova.network.neutron [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.318125] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.321990] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379088, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.402744] env[61974]: DEBUG nova.network.neutron [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.422021] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379085, 'name': CreateVM_Task, 'duration_secs': 0.341171} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.422265] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.422730] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.422902] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.423260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 937.423535] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd3fabd-7ac8-48aa-b376-79b260d61d74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.428410] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 937.428410] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b9b6a8-c6f1-ec20-1184-73bd225451fd" [ 937.428410] env[61974]: _type = "Task" [ 937.428410] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.437196] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b9b6a8-c6f1-ec20-1184-73bd225451fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.595979] env[61974]: INFO nova.compute.manager [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Took 25.54 seconds to build instance. 
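The Rename_Task, CreateVM_Task, PowerOnVM_Task and SearchDatastore_Task entries above all follow the same wait_for_task / _poll_task cycle: the driver submits a vCenter task, then an oslo.service looping call (loopingcall.py:435, "Waiting for function ... to return") repeatedly reads the task state and logs "progress is N%" until it logs "completed successfully". The snippet below is only an illustrative, self-contained sketch of that polling loop; fetch_task_info and TaskFailed are hypothetical stand-ins and not the oslo.vmware implementation.

# Illustrative sketch of the wait_for_task/_poll_task cycle seen in the log
# entries above. fetch_task_info() is a hypothetical stand-in for the vCenter
# property read that oslo.vmware performs; it is NOT the real library API.
import time


class TaskFailed(Exception):
    """Raised when the polled vCenter task ends in an error state."""


def wait_for_task(task_id, fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, mirroring the DEBUG lines
    "Task: {...} progress is N%" and "... completed successfully."."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 14}
        if info['state'] == 'success':
            return info                  # caller logs "completed successfully"
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # still queued/running: report progress and retry after a short sleep
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

In the real code path the poll interval and retry behaviour come from the oslo.vmware session configuration rather than function arguments; the sketch only shows the control flow that produces the progress/completion lines above.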
[ 937.701958] env[61974]: DEBUG nova.compute.utils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.703533] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 937.703705] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 937.751627] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345976} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.751919] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.752213] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.752346] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.756189] env[61974]: DEBUG nova.policy [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44db76dad27e40cdb4507bfe842db572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb6e7e7e52fc4aacaf5054732cd7d2df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 937.758294] env[61974]: DEBUG oslo_concurrency.lockutils [req-741835bf-2b77-41f6-bee6-741b7c842f3a req-e6a7de36-55bb-4db6-a343-8e226c11ed62 service nova] Releasing lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.821730] 
env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379088, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.905642] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Releasing lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.905935] env[61974]: DEBUG nova.compute.manager [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-vif-plugged-ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.906361] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Acquiring lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.906638] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.906824] env[61974]: DEBUG oslo_concurrency.lockutils [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.907032] env[61974]: DEBUG nova.compute.manager [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] No waiting events found dispatching network-vif-plugged-ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 937.907309] env[61974]: WARNING nova.compute.manager [req-f08d7e8d-d593-42ff-86bf-a9ee6882829c req-6480b9d7-39cc-43ea-97d4-f55099649feb service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received unexpected event network-vif-plugged-ce99556d-de01-4549-9f98-e0e52d4a0b16 for instance with vm_state building and task_state spawning. 
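The lock traffic above ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs", "Lock ... \"released\" ... :: held N.NNNs", all from lockutils.py:402/407/421) is produced by oslo.concurrency's in-process locks. Below is a minimal sketch of the caller-side pattern, assuming the default in-process semaphores shown in these entries; the function names and bodies are placeholders, not Nova's actual resource-tracker or event code.

# Minimal sketch of the oslo.concurrency lock pattern behind the
# "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines
# above. The method names and bodies are illustrative placeholders only.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Runs while holding the per-process "compute_resources" semaphore;
    # concurrent callers (instance_claim, update_usage, clean_compute_node_cache
    # in the entries above) serialize on the same name and show up as
    # "waited N.NNNs" in the DEBUG output.
    print(f"claiming {vcpus} vcpu / {memory_mb} MB for {instance_uuid}")


def pop_instance_event(instance_uuid, event_name):
    # The same lock can be taken explicitly as a context manager, which is
    # what produces the paired 'Lock "<uuid>-events" acquired ... released'
    # lines for the network-vif-plugged event above.
    with lockutils.lock(f"{instance_uuid}-events"):
        print(f"dispatching {event_name} for {instance_uuid}")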
[ 937.907761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.907935] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.939558] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b9b6a8-c6f1-ec20-1184-73bd225451fd, 'name': SearchDatastore_Task, 'duration_secs': 0.022935} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.939945] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.940215] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.940532] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.940734] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.941084] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.941293] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f0b50f2-3355-45eb-8c71-d40daa014886 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.955081] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d 
tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.955307] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.956510] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb07572c-548c-4136-8950-22f8b516fa63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.965443] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 937.965443] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5246798b-4a43-d691-f6b1-edabfadf08bd" [ 937.965443] env[61974]: _type = "Task" [ 937.965443] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.974523] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246798b-4a43-d691-f6b1-edabfadf08bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.012827] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ad5209-823c-4646-a24d-01964f73643c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.020166] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc7f5f9-5804-4d1e-99d6-aacf301cad3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.052359] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153d1595-c4d3-4623-b14e-7c61c31c3a44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.061245] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a39f89-438c-4cc9-b99c-69150a505dca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.075845] env[61974]: DEBUG nova.compute.provider_tree [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.077626] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Successfully created 
port: e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.098594] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76a24721-c219-441c-bdcd-242128e3e5b1 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.665s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.206762] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 938.324259] env[61974]: DEBUG oslo_vmware.api [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379088, 'name': PowerOnVM_Task, 'duration_secs': 0.634889} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.324551] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.324752] env[61974]: DEBUG nova.compute.manager [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.325591] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dd0d47-b6c3-4bb3-adda-a4a912d3d447 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.449481] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.481224] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246798b-4a43-d691-f6b1-edabfadf08bd, 'name': SearchDatastore_Task, 'duration_secs': 0.020264} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.482913] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f6a9c4-b768-4b41-8976-42d43a6a4edd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.491650] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 938.491650] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e89361-8b6f-dba0-d5f4-a8c1752c0758" [ 938.491650] env[61974]: _type = "Task" [ 938.491650] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.503421] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e89361-8b6f-dba0-d5f4-a8c1752c0758, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.581704] env[61974]: DEBUG nova.scheduler.client.report [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.601667] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 938.802775] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.803117] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.803321] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.803555] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.803719] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.803874] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.804325] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.804325] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.804443] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 
tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.804592] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.804773] env[61974]: DEBUG nova.virt.hardware [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.806185] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e926154-b074-4f94-b2ee-231db4fba6bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.814584] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034a30cf-305c-4433-bf7a-94cef1358d35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.830473] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.836165] env[61974]: DEBUG oslo.service.loopingcall [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.839901] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.842148] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cde630dc-320f-4635-bccd-ed62271470a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.856040] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.857166] env[61974]: DEBUG nova.compute.manager [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-changed-ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.857343] env[61974]: DEBUG nova.compute.manager [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Refreshing instance network info cache due to event network-changed-ce99556d-de01-4549-9f98-e0e52d4a0b16. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 938.857493] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] Acquiring lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.862733] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.862733] env[61974]: value = "task-1379090" [ 938.862733] env[61974]: _type = "Task" [ 938.862733] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.872706] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379090, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.885168] env[61974]: DEBUG nova.network.neutron [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Updating instance_info_cache with network_info: [{"id": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "address": "fa:16:3e:6e:7d:3e", "network": {"id": "1ca2c190-2a69-4156-a5bc-15ace2b944c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1620571047", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cf697b-5e", "ovs_interfaceid": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "address": "fa:16:3e:0c:35:12", "network": {"id": "da377860-1698-49c2-b451-e605b66bb70f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093528136", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce99556d-de", "ovs_interfaceid": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.003744] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e89361-8b6f-dba0-d5f4-a8c1752c0758, 'name': SearchDatastore_Task, 'duration_secs': 0.028307} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.004109] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.004526] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.004826] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d90bb3d-8a3c-45ae-ad80-4e0cbcef1323 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.012898] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 939.012898] env[61974]: value = "task-1379091" [ 939.012898] env[61974]: _type = "Task" [ 939.012898] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.023149] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379091, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.088763] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.090892] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.851s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.092534] env[61974]: INFO nova.compute.claims [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.114683] env[61974]: INFO nova.scheduler.client.report [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Deleted allocations for instance a9edbd98-3e67-476b-934d-15d893a62d02 [ 939.130708] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.226028] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 939.248951] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.248951] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.249243] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.249353] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.249582] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.249789] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.250094] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.250309] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.250634] env[61974]: DEBUG 
nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.250757] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.250965] env[61974]: DEBUG nova.virt.hardware [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.252108] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a723d5d5-283c-4000-a3b1-b8d85061f450 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.261335] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aadd45-5396-4e3c-8d42-dae54706f456 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.374436] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379090, 'name': CreateVM_Task, 'duration_secs': 0.311627} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.374635] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.375862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.375862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.375862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.376231] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d5c475-6772-4018-af17-7ddb0d54a225 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.382347] env[61974]: DEBUG oslo_vmware.api [None 
req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 939.382347] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52789a5b-43be-80c5-8b6f-be4fad23058f" [ 939.382347] env[61974]: _type = "Task" [ 939.382347] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.392176] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Releasing lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.392577] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance network_info: |[{"id": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "address": "fa:16:3e:6e:7d:3e", "network": {"id": "1ca2c190-2a69-4156-a5bc-15ace2b944c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1620571047", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cf697b-5e", "ovs_interfaceid": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "address": "fa:16:3e:0c:35:12", "network": {"id": "da377860-1698-49c2-b451-e605b66bb70f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093528136", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce99556d-de", "ovs_interfaceid": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 939.392891] 
env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52789a5b-43be-80c5-8b6f-be4fad23058f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.393203] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] Acquired lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.393411] env[61974]: DEBUG nova.network.neutron [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Refreshing network info cache for port ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.394739] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:7d:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee4b2432-c393-4e50-ae0e-b5e12bad37db', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27cf697b-5e9a-4214-907e-4bd03824c8fa', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:35:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce99556d-de01-4549-9f98-e0e52d4a0b16', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.406938] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Creating folder: Project (a117fc7495e1478b83f0a543effe8e06). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.408467] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a72608c-9ffc-4dc1-86d9-12a1bbfa5c25 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.423944] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Created folder: Project (a117fc7495e1478b83f0a543effe8e06) in parent group-v292912. [ 939.424141] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Creating folder: Instances. Parent ref: group-v292970. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 939.424505] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c31d1b58-c12c-4ba8-ae7c-83077b50db66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.428883] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "f0601d26-4e29-4946-bb52-50e2a2163535" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.429571] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.429795] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.430068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.430319] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.433481] env[61974]: INFO nova.compute.manager [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Terminating instance [ 939.436659] env[61974]: DEBUG nova.compute.manager [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 939.437048] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.438319] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbd136e-b13a-4094-b966-8f69129e509c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.443108] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Created folder: Instances in parent group-v292970. [ 939.443108] env[61974]: DEBUG oslo.service.loopingcall [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.443855] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.444274] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2da1625f-7240-402e-a203-fcf9842ee0f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.464313] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.465131] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4eb26d41-890c-41d2-ae81-eae7fb943efd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.471070] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.471070] env[61974]: value = "task-1379094" [ 939.471070] env[61974]: _type = "Task" [ 939.471070] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.472613] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 939.472613] env[61974]: value = "task-1379095" [ 939.472613] env[61974]: _type = "Task" [ 939.472613] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.485435] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379094, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.489560] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.524168] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379091, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.577558] env[61974]: DEBUG nova.compute.manager [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.577950] env[61974]: DEBUG oslo_concurrency.lockutils [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.578170] env[61974]: DEBUG oslo_concurrency.lockutils [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.578389] env[61974]: DEBUG oslo_concurrency.lockutils [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.578584] env[61974]: DEBUG nova.compute.manager [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] No waiting events found dispatching network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.578775] env[61974]: WARNING nova.compute.manager [req-aa4f065e-4c49-4b35-b281-4d07419651a4 req-2b02cc2d-d269-4cc9-928c-dbbadea5250d service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received unexpected event network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba for instance with vm_state building and task_state spawning. 
[ 939.627078] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ad4b6fc-7699-46ca-a294-c24ec59485e3 tempest-ServerRescueNegativeTestJSON-2025890613 tempest-ServerRescueNegativeTestJSON-2025890613-project-member] Lock "a9edbd98-3e67-476b-934d-15d893a62d02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.253s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.897884] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52789a5b-43be-80c5-8b6f-be4fad23058f, 'name': SearchDatastore_Task, 'duration_secs': 0.059254} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.898645] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.898905] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.899322] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.899620] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.900289] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.900430] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5528a9ed-e872-489c-aa90-3dd6a87c45b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.915286] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.915597] env[61974]: 
DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.916791] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8baecf04-01af-4cab-b5d2-381fa0b7b216 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.924339] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 939.924339] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d494fd-7b55-f455-56d1-a9c6e7261d84" [ 939.924339] env[61974]: _type = "Task" [ 939.924339] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.935682] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d494fd-7b55-f455-56d1-a9c6e7261d84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.966354] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.966741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.988503] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379094, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.991352] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379095, 'name': PowerOffVM_Task, 'duration_secs': 0.310478} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.991640] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.992636] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.992636] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0da946d-6d36-4775-a90f-f21fcbcd2a05 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.024117] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636718} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.026481] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.026722] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.027020] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a74295b3-968c-4558-9a65-f45af3fdce13 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.033357] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 940.033357] env[61974]: value = "task-1379097" [ 940.033357] env[61974]: _type = "Task" [ 940.033357] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.044089] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379097, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.065433] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.065684] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.065948] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleting the datastore file [datastore2] f0601d26-4e29-4946-bb52-50e2a2163535 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.066341] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9104fdec-85fb-4b9d-ac73-360cecff4d4c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.073867] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 940.073867] env[61974]: value = "task-1379098" [ 940.073867] env[61974]: _type = "Task" [ 940.073867] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.083985] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.161026] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Successfully updated port: e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.193744] env[61974]: DEBUG nova.network.neutron [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Updated VIF entry in instance network info cache for port ce99556d-de01-4549-9f98-e0e52d4a0b16. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.194212] env[61974]: DEBUG nova.network.neutron [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Updating instance_info_cache with network_info: [{"id": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "address": "fa:16:3e:6e:7d:3e", "network": {"id": "1ca2c190-2a69-4156-a5bc-15ace2b944c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1620571047", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee4b2432-c393-4e50-ae0e-b5e12bad37db", "external-id": "nsx-vlan-transportzone-985", "segmentation_id": 985, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27cf697b-5e", "ovs_interfaceid": "27cf697b-5e9a-4214-907e-4bd03824c8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "address": "fa:16:3e:0c:35:12", "network": {"id": "da377860-1698-49c2-b451-e605b66bb70f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1093528136", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.199", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a117fc7495e1478b83f0a543effe8e06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce99556d-de", "ovs_interfaceid": "ce99556d-de01-4549-9f98-e0e52d4a0b16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.209924] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk. 
{{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 940.211071] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af5f2b5-d668-4239-a43d-d13b140a1816 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.219371] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk is in state: ready. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 940.219547] env[61974]: ERROR oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk due to incomplete transfer. [ 940.220018] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ee0d55e9-c422-413f-8fe4-fc83999dddd9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.226855] env[61974]: DEBUG oslo_vmware.rw_handles [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b7b44e-f32b-49da-024b-3e3ce207e2eb/disk-0.vmdk. {{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 940.227644] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Uploaded image 7b64f88a-41ea-4e46-a992-5bf41cb091d5 to the Glance image server {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 940.229592] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Destroying the VM {{(pid=61974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 940.232036] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ddc88fbd-30d8-4c57-b265-7629c73d585e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.239480] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 940.239480] env[61974]: value = "task-1379099" [ 940.239480] env[61974]: _type = "Task" [ 940.239480] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.250450] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379099, 'name': Destroy_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.297010] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Volume attach. Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 940.297590] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292967', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'name': 'volume-004ebd4b-70b2-40ab-8253-dc095c5312e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1aa2a63c-e352-4c9b-9445-9b45bf3ae14c', 'attached_at': '', 'detached_at': '', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'serial': '004ebd4b-70b2-40ab-8253-dc095c5312e1'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 940.298640] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881ee60b-5e0a-4f6b-b444-0180d7db7f81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.322417] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbc3e1e-58b0-4dc4-b104-da706cbae814 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.347307] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] volume-004ebd4b-70b2-40ab-8253-dc095c5312e1/volume-004ebd4b-70b2-40ab-8253-dc095c5312e1.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.350559] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b424389-92b8-4fe6-8719-9b0bfc7ac6ef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.368632] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 940.368632] env[61974]: value = "task-1379100" [ 940.368632] env[61974]: _type = "Task" [ 940.368632] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.378547] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379100, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.435910] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d494fd-7b55-f455-56d1-a9c6e7261d84, 'name': SearchDatastore_Task, 'duration_secs': 0.018886} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.439314] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7014b739-4f81-4ec5-96e3-b93b79faff5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.444570] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 940.444570] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f37fa0-0980-7730-42a2-0715c20b4ed0" [ 940.444570] env[61974]: _type = "Task" [ 940.444570] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.449860] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5e28b8-6cb7-4e4c-8180-94408656c7e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.455332] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f37fa0-0980-7730-42a2-0715c20b4ed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.459646] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5d9f8a-4f1e-4610-9acf-1960ba9c66b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.493548] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d74a28-3b87-43e8-9bea-13cc26ab5c8c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.503913] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379094, 'name': CreateVM_Task, 'duration_secs': 0.554355} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.504167] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.505353] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecdec47e-bad7-4eb2-ac70-e10be1c6b98a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.509522] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.509690] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.510015] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.510273] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23bf6e6f-72c3-4ab8-bfee-5bcccce53aab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.514576] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 940.514576] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]524ef5e6-118e-60dc-00f7-a41efb9639b2" [ 940.514576] env[61974]: _type = "Task" [ 940.514576] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.522969] env[61974]: DEBUG nova.compute.provider_tree [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.532555] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524ef5e6-118e-60dc-00f7-a41efb9639b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.543632] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126718} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.543881] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.544662] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed614b4-699a-45b8-9751-8c5812650455 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.565129] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.565129] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ede4b60a-d3f0-4857-ad7b-df07566aab30 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.595992] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.597893] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 940.597893] env[61974]: value = "task-1379101" [ 940.597893] env[61974]: _type = "Task" [ 940.597893] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.609612] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379101, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.667788] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.667788] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.667788] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.699567] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e460f98-3478-4847-a32a-50f5ffb4134c req-dffd88d7-7a3e-4c6f-97d4-1f0f5527dd11 service nova] Releasing lock "refresh_cache-097ad079-9712-4183-9135-b15ad3a65d6d" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.751158] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379099, 'name': Destroy_Task} progress is 33%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.879020] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379100, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.886906] env[61974]: DEBUG nova.compute.manager [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.887060] env[61974]: DEBUG nova.compute.manager [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing instance network info cache due to event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.887270] env[61974]: DEBUG oslo_concurrency.lockutils [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.960587] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f37fa0-0980-7730-42a2-0715c20b4ed0, 'name': SearchDatastore_Task, 'duration_secs': 0.039887} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.960887] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.961176] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.961501] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b4f4972-7043-431d-9247-9d0403c06443 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.969720] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 940.969720] env[61974]: value = "task-1379102" [ 940.969720] env[61974]: _type = "Task" [ 940.969720] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.977988] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.028755] env[61974]: DEBUG nova.scheduler.client.report [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.036659] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524ef5e6-118e-60dc-00f7-a41efb9639b2, 'name': SearchDatastore_Task, 'duration_secs': 0.030161} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.037927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.037927] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.037927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.037927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.038575] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.038575] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c5fdf7e-5183-4c57-8858-011f0d344981 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.052435] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.052435] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.052435] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3685778c-8c91-4910-916c-b429a0f253f1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.060755] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 941.060755] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5246d93c-ae15-a3aa-5386-bddceb361afe" [ 941.060755] env[61974]: _type = "Task" [ 941.060755] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.070466] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246d93c-ae15-a3aa-5386-bddceb361afe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.095773] env[61974]: DEBUG oslo_vmware.api [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.587108} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.095985] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.096283] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.096513] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.096756] env[61974]: INFO nova.compute.manager [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Took 1.66 seconds to destroy the instance on the hypervisor. [ 941.097037] env[61974]: DEBUG oslo.service.loopingcall [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.097241] env[61974]: DEBUG nova.compute.manager [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 941.097331] env[61974]: DEBUG nova.network.neutron [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.109540] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379101, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.223923] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.256981] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379099, 'name': Destroy_Task, 'duration_secs': 0.824955} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.257860] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Destroyed the VM [ 941.258079] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Deleting Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 941.258492] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a15edcf1-e8de-4e94-bcae-d9fbcfcf675d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.265990] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 941.265990] env[61974]: value = "task-1379103" [ 941.265990] env[61974]: _type = "Task" [ 941.265990] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.275936] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379103, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.387124] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379100, 'name': ReconfigVM_Task, 'duration_secs': 0.724353} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.387603] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfigured VM instance instance-00000044 to attach disk [datastore2] volume-004ebd4b-70b2-40ab-8253-dc095c5312e1/volume-004ebd4b-70b2-40ab-8253-dc095c5312e1.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.400983] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e378726-205e-4606-a574-112a033c6456 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.418993] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 941.418993] env[61974]: value = "task-1379104" [ 941.418993] env[61974]: _type = "Task" [ 941.418993] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.431092] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.479545] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.501237] env[61974]: DEBUG nova.network.neutron [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.538628] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.539213] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 941.542467] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.257s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.544093] env[61974]: INFO nova.compute.claims [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.574249] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5246d93c-ae15-a3aa-5386-bddceb361afe, 'name': SearchDatastore_Task, 'duration_secs': 0.042297} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.575476] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d930fbbd-6a5e-443e-83ce-5e549cc6dd59 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.582224] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 941.582224] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5250db56-72fe-9e42-2a01-3c6e7cd89e12" [ 941.582224] env[61974]: _type = "Task" [ 941.582224] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.596436] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5250db56-72fe-9e42-2a01-3c6e7cd89e12, 'name': SearchDatastore_Task, 'duration_secs': 0.009761} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.596601] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.596867] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 097ad079-9712-4183-9135-b15ad3a65d6d/097ad079-9712-4183-9135-b15ad3a65d6d.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.597165] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ebb5fc8-c933-4532-bf3d-f25f5912ba76 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.614668] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 941.614668] env[61974]: value = "task-1379105" [ 941.614668] env[61974]: _type = "Task" [ 941.614668] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.615204] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379101, 'name': ReconfigVM_Task, 'duration_secs': 0.8671} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.615739] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.619597] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9106dc82-b751-4e48-8caa-af69182afbc4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.628208] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.629831] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 941.629831] env[61974]: value = "task-1379106" [ 941.629831] env[61974]: _type = "Task" [ 941.629831] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.638294] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379106, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.752014] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "635f362a-582e-44bc-85d8-8a69943982b0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.752326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.752602] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "635f362a-582e-44bc-85d8-8a69943982b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.752751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.753035] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.756068] env[61974]: INFO nova.compute.manager [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Terminating instance [ 941.758554] env[61974]: DEBUG nova.compute.manager [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 
tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.758770] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.759557] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e146ea6-156c-456e-b10e-7519f98e0442 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.767148] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.770518] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16f42a82-f1fb-4557-b5b0-7f365bf7bc04 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.777312] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379103, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.928933] env[61974]: DEBUG oslo_vmware.api [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379104, 'name': ReconfigVM_Task, 'duration_secs': 0.184804} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.929347] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292967', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'name': 'volume-004ebd4b-70b2-40ab-8253-dc095c5312e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1aa2a63c-e352-4c9b-9445-9b45bf3ae14c', 'attached_at': '', 'detached_at': '', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'serial': '004ebd4b-70b2-40ab-8253-dc095c5312e1'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 941.980674] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.005528] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.005888] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance network_info: |[{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 942.006250] env[61974]: DEBUG oslo_concurrency.lockutils [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.006438] env[61974]: DEBUG nova.network.neutron [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.007683] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:63:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ccbc7a-cf8d-4ea2-8411-291a1e27df7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e377f334-8d36-4f17-8532-abbd37c47eba', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.016254] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 
tempest-ServersNegativeTestJSON-1068579309-project-member] Creating folder: Project (fb6e7e7e52fc4aacaf5054732cd7d2df). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.019728] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34081265-095b-416c-8c38-12a7c581effc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.030456] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created folder: Project (fb6e7e7e52fc4aacaf5054732cd7d2df) in parent group-v292912. [ 942.030655] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating folder: Instances. Parent ref: group-v292973. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.030895] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e13cf66-9ea6-44ae-811a-821c3716f437 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.039441] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created folder: Instances in parent group-v292973. [ 942.039687] env[61974]: DEBUG oslo.service.loopingcall [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.039889] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.040112] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e1aadf3-e0c5-45f3-ab26-df931f0839c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.056437] env[61974]: DEBUG nova.compute.utils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.062513] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 942.062672] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 942.069333] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.069333] env[61974]: value = "task-1379110" [ 942.069333] env[61974]: _type = "Task" [ 942.069333] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.079638] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379110, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.088717] env[61974]: DEBUG nova.network.neutron [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.127123] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379105, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.135417] env[61974]: DEBUG nova.policy [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4896588cebd84071a573046de7006429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db6af28263c40708c2466226ce03009', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 942.143446] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379106, 'name': Rename_Task, 'duration_secs': 0.175054} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.143747] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.144035] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb655198-d579-4a8b-a8ad-e645de29867d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.149941] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 942.149941] env[61974]: value = "task-1379111" [ 942.149941] env[61974]: _type = "Task" [ 942.149941] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.158729] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.277115] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379103, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.428616] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "5780d1d6-cd40-4b97-8a68-072c090540af" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.429047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.432037] env[61974]: DEBUG nova.network.neutron [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updated VIF entry in instance network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.432447] env[61974]: DEBUG nova.network.neutron [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.483894] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379102, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.562876] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 942.580910] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379110, 'name': CreateVM_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.594324] env[61974]: INFO nova.compute.manager [-] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Took 1.49 seconds to deallocate network for instance. [ 942.633212] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379105, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.636826] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Successfully created port: 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 942.662612] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379111, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.778955] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379103, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.937524] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6824c00-bb77-4205-ba18-ac6d817753e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.940956] env[61974]: DEBUG oslo_concurrency.lockutils [req-8660c386-1eba-4b9f-8247-23e812ba76c3 req-d9296f0e-fd44-46f2-9c75-b280209f3ce9 service nova] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.945860] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6719a6a-945f-4bb8-8993-5c035afd6c45 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.979616] env[61974]: DEBUG nova.compute.manager [req-29c37d69-d7e3-4183-8889-bdef4a219e66 req-095a95f9-ac61-415e-a496-ca0cde14ce46 service nova] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Received event network-vif-deleted-31fec0d1-b5b6-4c0c-ba81-11fad03cfa19 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.983287] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c2ab4e-6864-4ab8-b073-9b8d99581bfc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.988502] env[61974]: DEBUG nova.objects.instance [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'flavor' on Instance uuid 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.995676] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379102, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.778837} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.996916] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ded553f-098b-448b-a582-fa94f59a8c5c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.000858] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.001329] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.001661] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae02f429-dec5-4d79-9bd8-55a2394b6fb4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.014544] env[61974]: DEBUG nova.compute.provider_tree [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.019887] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 943.019887] env[61974]: value = "task-1379112" [ 943.019887] env[61974]: _type = "Task" [ 943.019887] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.028247] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379112, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.085939] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379110, 'name': CreateVM_Task, 'duration_secs': 0.849108} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.086156] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 943.086859] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.087028] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.087357] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.087610] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91b5821e-304e-4e26-9e14-604fe40a0729 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.092543] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 943.092543] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d6550f-4da6-0083-84a1-551bddfec439" [ 943.092543] env[61974]: _type = "Task" [ 943.092543] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.097493] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.097799] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.097995] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleting the datastore file [datastore1] 635f362a-582e-44bc-85d8-8a69943982b0 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.101354] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-686ec90d-109a-4ab2-99d7-823fd822bd5f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.103598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.103876] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d6550f-4da6-0083-84a1-551bddfec439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.111232] env[61974]: DEBUG oslo_vmware.api [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 943.111232] env[61974]: value = "task-1379113" [ 943.111232] env[61974]: _type = "Task" [ 943.111232] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.120444] env[61974]: DEBUG oslo_vmware.api [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.130889] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379105, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.139461} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.131213] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 097ad079-9712-4183-9135-b15ad3a65d6d/097ad079-9712-4183-9135-b15ad3a65d6d.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.131466] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.131750] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36a03ba3-20a2-4393-8f50-7f5a143a2a46 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.137730] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 943.137730] env[61974]: value = "task-1379114" [ 943.137730] env[61974]: _type = "Task" [ 943.137730] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.146018] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379114, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.160433] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379111, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.278127] env[61974]: DEBUG oslo_vmware.api [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379103, 'name': RemoveSnapshot_Task, 'duration_secs': 1.66882} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.278454] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Deleted Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 943.278697] env[61974]: INFO nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Took 15.67 seconds to snapshot the instance on the hypervisor. 
[ 943.493973] env[61974]: DEBUG oslo_concurrency.lockutils [None req-189ff09b-2fc6-432b-aa2c-d629c58a2ad9 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.517845] env[61974]: DEBUG nova.scheduler.client.report [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.530625] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379112, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112675} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.530913] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.531691] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f6b450-da10-4c15-9c5d-f93717b3c243 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.551438] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.551932] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bf9c7dc-50a7-46ee-bc87-4ced499c3cc7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.570546] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 943.570546] env[61974]: value = "task-1379115" [ 943.570546] env[61974]: _type = "Task" [ 943.570546] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.578715] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379115, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.582039] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 943.602160] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d6550f-4da6-0083-84a1-551bddfec439, 'name': SearchDatastore_Task, 'duration_secs': 0.019156} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.604373] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.604627] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.604864] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.605027] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.605222] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.605660] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a391a6aa-4277-4690-8821-272dc366bcc0 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.611861] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 943.612106] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.612274] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 943.612513] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.612606] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 943.612765] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 943.612967] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 943.613157] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 943.613377] env[61974]: DEBUG 
nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 943.613522] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 943.613756] env[61974]: DEBUG nova.virt.hardware [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 943.614601] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64252fd3-626e-4ed5-bb9e-79859b385903 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.620799] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.620978] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.624676] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3415041-90c1-4373-b5a4-502ee997b681 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.632891] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa1b8fa-35cc-4e92-88a1-e62a463be302 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.642508] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 943.642508] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5238270d-96e4-355b-8f2a-6ad2e241828b" [ 943.642508] env[61974]: _type = "Task" [ 943.642508] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.642687] env[61974]: DEBUG oslo_vmware.api [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188086} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.646045] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.646265] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.646452] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.646631] env[61974]: INFO nova.compute.manager [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Took 1.89 seconds to destroy the instance on the hypervisor. [ 943.646871] env[61974]: DEBUG oslo.service.loopingcall [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.660499] env[61974]: DEBUG nova.compute.manager [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 943.660499] env[61974]: DEBUG nova.network.neutron [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.667350] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087363} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.667930] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.671720] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babbfc5d-0bbe-496b-9990-35a504d986d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.674070] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5238270d-96e4-355b-8f2a-6ad2e241828b, 'name': SearchDatastore_Task, 'duration_secs': 0.00851} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.679910] env[61974]: DEBUG oslo_vmware.api [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379111, 'name': PowerOnVM_Task, 'duration_secs': 1.40389} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.680430] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-607a6a2e-db68-40d1-9f32-4a42855cb3dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.682570] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.682776] env[61974]: INFO nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Took 6.88 seconds to spawn the instance on the hypervisor. 
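The lock messages throughout this section come in the same three-step pattern: "Acquiring lock X by Y", "acquired :: waited N s", and ""released" :: held N s". A small illustrative context manager, not the oslo.concurrency implementation, that produces the same wait/held bookkeeping:

import threading
import time
from contextlib import contextmanager

_locks = {}  # name -> threading.Lock, a stand-in for a process-wide lock registry

@contextmanager
def timed_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t_request = time.monotonic()
    lock.acquire()
    t_acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {t_acquired - t_request:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t_acquired:.3f}s')

For example, with timed_lock("compute_resources", "ResourceTracker.update_usage"): ... would emit the same three lines seen for the resource tracker entries in this section.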
[ 943.682957] env[61974]: DEBUG nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 943.695257] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba9ce68-c32a-470d-b77f-ac852457d9d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.706165] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 097ad079-9712-4183-9135-b15ad3a65d6d/097ad079-9712-4183-9135-b15ad3a65d6d.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.709017] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0af9d8b5-47c3-48a1-92f2-24ba0b7b0867 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.725869] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 943.725869] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d61c02-5bee-44fc-6fec-8c61c615ffd8" [ 943.725869] env[61974]: _type = "Task" [ 943.725869] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.735258] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 943.735258] env[61974]: value = "task-1379116" [ 943.735258] env[61974]: _type = "Task" [ 943.735258] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.742584] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d61c02-5bee-44fc-6fec-8c61c615ffd8, 'name': SearchDatastore_Task, 'duration_secs': 0.00886} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.743198] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.743468] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.743721] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abacc481-7cb3-4cda-94f4-096f24ca49e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.748865] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379116, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.753483] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 943.753483] env[61974]: value = "task-1379117" [ 943.753483] env[61974]: _type = "Task" [ 943.753483] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.761277] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.782400] env[61974]: DEBUG nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance disappeared during snapshot {{(pid=61974) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 943.794359] env[61974]: DEBUG nova.compute.manager [None req-673787f3-f149-4a3e-a594-22418c613eb9 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image not found during clean up 7b64f88a-41ea-4e46-a992-5bf41cb091d5 {{(pid=61974) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4500}} [ 944.026748] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.027344] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 944.030714] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.577s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.031618] env[61974]: DEBUG nova.objects.instance [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lazy-loading 'resources' on Instance uuid f88f0ef2-24f2-4eef-92a3-8de2ebb6944a {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.080783] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379115, 'name': ReconfigVM_Task, 'duration_secs': 0.323322} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.081054] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd/7b338210-5be8-4838-b815-8f2c6cc19ccd.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.081710] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da64295b-6c12-41b5-9e37-878ca2400fbc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.089188] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 944.089188] env[61974]: value = "task-1379118" [ 944.089188] env[61974]: _type = "Task" [ 944.089188] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.097876] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379118, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.129113] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.129583] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.129902] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.130213] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.130476] env[61974]: DEBUG 
oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.132878] env[61974]: INFO nova.compute.manager [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Terminating instance [ 944.135727] env[61974]: DEBUG nova.compute.manager [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 944.135958] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.136268] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3de94fd-ba52-4e07-a649-c426e0ed97ef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.143020] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 944.143020] env[61974]: value = "task-1379119" [ 944.143020] env[61974]: _type = "Task" [ 944.143020] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.151755] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.244731] env[61974]: INFO nova.compute.manager [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Took 27.48 seconds to build instance. [ 944.251460] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379116, 'name': ReconfigVM_Task, 'duration_secs': 0.396767} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.251742] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 097ad079-9712-4183-9135-b15ad3a65d6d/097ad079-9712-4183-9135-b15ad3a65d6d.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.252645] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74f380f5-5ca8-4b40-b1b9-1e174f2c211b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.261353] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 944.261353] env[61974]: value = "task-1379120" [ 944.261353] env[61974]: _type = "Task" [ 944.261353] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.265326] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503957} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.268688] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.268924] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.269207] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5edad02e-c08f-403e-90ab-1c6c64c334b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.278333] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379120, 'name': Rename_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.278652] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 944.278652] env[61974]: value = "task-1379121" [ 944.278652] env[61974]: _type = "Task" [ 944.278652] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.287385] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.438077] env[61974]: DEBUG nova.network.neutron [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.532643] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Successfully updated port: 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.535041] env[61974]: DEBUG nova.compute.utils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.538655] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 944.538883] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.583749] env[61974]: DEBUG nova.policy [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 944.599911] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379118, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.655771] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.747139] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c0a305f4-6550-4ba0-ad88-d2730530291d tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.859s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.774478] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379120, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.787249] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064642} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.789620] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.790580] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92154636-9e10-44d0-9143-aa7a7d04739b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.813247] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.815868] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f807d66-2871-4394-88d9-a0406aa34475 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.836586] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 944.836586] env[61974]: value = "task-1379122" [ 944.836586] env[61974]: _type = "Task" [ 944.836586] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.849425] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379122, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.852956] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96010053-4e61-412b-b106-b9fee2c8e144 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.861262] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc080e3-5c3f-4e7c-b9e3-40fb9bc8a690 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.892325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96c69fa-01d8-4c67-8b6b-1b17ae563a97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.895745] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Successfully created port: a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.903466] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a930abd-03a1-4cf3-92a2-76e8f920e9fc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.917311] env[61974]: DEBUG nova.compute.provider_tree [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.940762] env[61974]: INFO nova.compute.manager [-] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Took 1.28 seconds to deallocate network for instance. 
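The "Inventory has not changed for provider ..." entries report the resource provider's inventory as a dict keyed by resource class. As a rough illustration (not Nova or Placement code) of how those figures bound what can be scheduled, assuming the usual capacity formula (total - reserved) * allocation_ratio:

# Values copied from the inventory data logged for provider
# 81f72dd1-35ef-4b87-b120-a6ea5ab8608a earlier in this section.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 177},
}

for resource_class, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: up to {capacity:.0f} schedulable in total, "
          f"no single allocation larger than {inv['max_unit']}")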
[ 944.983739] env[61974]: INFO nova.compute.manager [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Rebuilding instance [ 945.033814] env[61974]: DEBUG nova.compute.manager [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.034819] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec2e39d-ad0c-4098-94b6-925fd37ffd8b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.041766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.041766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.041766] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.042516] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 945.106987] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379118, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.153389] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379119, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.202026] env[61974]: DEBUG nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Received event network-vif-deleted-e143051e-56f9-4303-833b-6e0bda6b385a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.202026] env[61974]: DEBUG nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received event network-vif-plugged-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.202026] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Acquiring lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.202026] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.202026] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.202026] env[61974]: DEBUG nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] No waiting events found dispatching network-vif-plugged-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 945.202026] env[61974]: WARNING nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received unexpected event network-vif-plugged-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 for instance with vm_state building and task_state spawning. [ 945.202026] env[61974]: DEBUG nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.202026] env[61974]: DEBUG nova.compute.manager [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing instance network info cache due to event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.202684] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.249995] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 945.276365] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379120, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.347563] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379122, 'name': ReconfigVM_Task, 'duration_secs': 0.287763} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.347912] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.348671] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-765e8688-dcec-4e29-bd33-be2f4f8a730a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.355522] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 945.355522] env[61974]: value = "task-1379123" [ 945.355522] env[61974]: _type = "Task" [ 945.355522] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.364573] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379123, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.420559] env[61974]: DEBUG nova.scheduler.client.report [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.448342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.563602] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.563602] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d577d650-ecdc-4ebd-8a0c-53ad8e847380 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.571217] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 945.571217] env[61974]: value = "task-1379124" [ 945.571217] env[61974]: _type = "Task" [ 945.571217] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.580389] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.582546] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.600193] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379118, 'name': Rename_Task, 'duration_secs': 1.255698} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.603095] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.603372] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c547ffd4-c6bc-45ab-a6e2-5828e1559038 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.611098] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 945.611098] env[61974]: value = "task-1379125" [ 945.611098] env[61974]: _type = "Task" [ 945.611098] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.623317] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379125, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.655164] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379119, 'name': PowerOffVM_Task, 'duration_secs': 1.251798} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.655584] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.656795] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 945.657059] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292967', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'name': 'volume-004ebd4b-70b2-40ab-8253-dc095c5312e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1aa2a63c-e352-4c9b-9445-9b45bf3ae14c', 'attached_at': '', 'detached_at': '', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'serial': '004ebd4b-70b2-40ab-8253-dc095c5312e1'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 945.658023] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d05aa6-2934-4e7a-a63b-13b70ed86e39 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.683673] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a372ed-34b0-47f4-b988-f86a55f51bb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.690761] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2de00a-3fd7-4ee8-9575-13c591fe3bf5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.713889] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e09bc0e-956e-4bb0-b336-ea22a53c0d3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.729843] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] The volume has not been displaced from its original location: [datastore2] volume-004ebd4b-70b2-40ab-8253-dc095c5312e1/volume-004ebd4b-70b2-40ab-8253-dc095c5312e1.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 945.734990] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfiguring VM instance instance-00000044 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 945.735332] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-415ff949-67fa-44d1-b41a-09987a8ccfb7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.757472] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 945.757472] env[61974]: value = "task-1379126" [ 945.757472] env[61974]: _type = "Task" [ 945.757472] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.768479] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379126, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.776894] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379120, 'name': Rename_Task, 'duration_secs': 1.120973} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.777869] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.778180] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.778461] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad3ed667-698a-490e-a08b-b552d554fcb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.788988] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 945.788988] env[61974]: value = "task-1379127" [ 945.788988] env[61974]: _type = "Task" [ 945.788988] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.800197] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.865915] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379123, 'name': Rename_Task, 'duration_secs': 0.131545} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.866278] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.866547] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94a3591d-a1cf-49d6-bf35-a79db8331da9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.871034] env[61974]: DEBUG nova.network.neutron [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.874839] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 945.874839] env[61974]: value = "task-1379128" [ 945.874839] env[61974]: _type = "Task" [ 945.874839] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.881789] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.931140] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.933572] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.327s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.935122] env[61974]: INFO nova.compute.claims [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.951364] env[61974]: INFO nova.scheduler.client.report [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Deleted allocations for instance f88f0ef2-24f2-4eef-92a3-8de2ebb6944a [ 946.065190] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 946.081458] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379124, 'name': PowerOffVM_Task, 'duration_secs': 0.145864} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.081822] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.082097] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 946.083038] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912a7f69-1100-4c21-a7c0-d10275620780 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.093467] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.095634] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.095834] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.096013] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.096226] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.096376] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 
tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.096528] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.096741] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 946.096906] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.097093] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.097266] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.097447] env[61974]: DEBUG nova.virt.hardware [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.097701] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bb0f0bf-7648-4c3c-aeeb-1e29def7c075 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.099662] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83cc473-aae8-4083-8e72-532e6244ad82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.107617] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cbd4a4-7f70-4138-bc80-83794a8e11a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.122011] env[61974]: DEBUG oslo_vmware.api [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379125, 'name': PowerOnVM_Task, 'duration_secs': 0.483899} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.130330] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.130573] env[61974]: DEBUG nova.compute.manager [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.132489] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2210592a-a1bc-44db-8d7c-cdcc473a8fff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.135249] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.135422] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.135634] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleting the datastore file [datastore2] 45fda940-b7f0-410c-b31a-b5cd365c28fe {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.136248] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f1f649b-35cf-443f-ad2f-13d243af402f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.145817] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 946.145817] env[61974]: value = "task-1379130" [ 946.145817] env[61974]: _type = "Task" [ 946.145817] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.154093] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379130, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.268345] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379126, 'name': ReconfigVM_Task, 'duration_secs': 0.309647} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.268664] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Reconfigured VM instance instance-00000044 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 946.273543] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5194424-21d5-42dd-8714-bc3fa31fcf0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.288753] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 946.288753] env[61974]: value = "task-1379131" [ 946.288753] env[61974]: _type = "Task" [ 946.288753] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.302499] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379127, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.306044] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379131, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.376221] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.376221] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Instance network_info: |[{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 946.376221] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.376516] env[61974]: DEBUG nova.network.neutron [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.377930] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:de:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ebf62dc-0f02-4b1b-bd8f-adc0186ae753', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.389955] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] Creating folder: Project (2db6af28263c40708c2466226ce03009). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.395731] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa263012-1714-4b8a-bc2b-8b92ac4906e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.405683] env[61974]: DEBUG oslo_vmware.api [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379128, 'name': PowerOnVM_Task, 'duration_secs': 0.529094} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.407856] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.408158] env[61974]: INFO nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Took 7.18 seconds to spawn the instance on the hypervisor. [ 946.408422] env[61974]: DEBUG nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.409390] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created folder: Project (2db6af28263c40708c2466226ce03009) in parent group-v292912. [ 946.409390] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating folder: Instances. Parent ref: group-v292976. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.410199] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd59225-5135-4d29-9808-f4ea5a06a5b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.413783] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58945b17-fecd-496d-b20b-4762c8f601f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.431606] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created folder: Instances in parent group-v292976. 
[ 946.431810] env[61974]: DEBUG oslo.service.loopingcall [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.432035] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.432253] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04156b62-e20b-40c4-8c3c-7dc4bfd87614 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.460838] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.460838] env[61974]: value = "task-1379134" [ 946.460838] env[61974]: _type = "Task" [ 946.460838] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.461382] env[61974]: DEBUG oslo_concurrency.lockutils [None req-642c1ccd-0129-46bd-8cb7-ca5b94595c9b tempest-ServerGroupTestJSON-925935507 tempest-ServerGroupTestJSON-925935507-project-member] Lock "f88f0ef2-24f2-4eef-92a3-8de2ebb6944a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.915s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.478312] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379134, 'name': CreateVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.656649] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Successfully updated port: a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 946.670922] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.679698] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131027} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.679848] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.680091] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.680879] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.802549] env[61974]: DEBUG oslo_vmware.api [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379127, 'name': PowerOnVM_Task, 'duration_secs': 0.555339} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.805785] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.806028] env[61974]: INFO nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Took 13.61 seconds to spawn the instance on the hypervisor. [ 946.806239] env[61974]: DEBUG nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.806555] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379131, 'name': ReconfigVM_Task, 'duration_secs': 0.170458} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.807315] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdc2990-f722-4588-8599-45be2ea9a64c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.809865] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292967', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'name': 'volume-004ebd4b-70b2-40ab-8253-dc095c5312e1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1aa2a63c-e352-4c9b-9445-9b45bf3ae14c', 'attached_at': '', 'detached_at': '', 'volume_id': '004ebd4b-70b2-40ab-8253-dc095c5312e1', 'serial': '004ebd4b-70b2-40ab-8253-dc095c5312e1'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 946.810171] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 946.810899] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc6e146-1bc8-4abe-9bc2-d924d9763671 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.821560] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.822078] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c84cdcdc-30f1-4040-8383-aee8c73df8f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.892156] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.892394] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.892578] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleting the datastore file [datastore1] 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
946.892859] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b97ba1e-c7f7-42cc-b068-8d7fc3a4a56a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.904067] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 946.904067] env[61974]: value = "task-1379136" [ 946.904067] env[61974]: _type = "Task" [ 946.904067] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.915266] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.936971] env[61974]: INFO nova.compute.manager [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Took 23.08 seconds to build instance. [ 946.978090] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379134, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.171703] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.171996] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.172247] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.235909] env[61974]: DEBUG nova.network.neutron [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updated VIF entry in instance network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.235909] env[61974]: DEBUG nova.network.neutron [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.257356] env[61974]: DEBUG nova.compute.manager [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-vif-plugged-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.257652] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.257936] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.258179] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.258461] env[61974]: DEBUG nova.compute.manager [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] No waiting events found dispatching network-vif-plugged-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 947.258709] env[61974]: WARNING nova.compute.manager [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received unexpected event network-vif-plugged-a342d02a-7577-428c-946f-e5725112ceec for instance with vm_state building and task_state spawning. [ 947.258946] env[61974]: DEBUG nova.compute.manager [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.259200] env[61974]: DEBUG nova.compute.manager [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-a342d02a-7577-428c-946f-e5725112ceec. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 947.259456] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.289899] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1fb5ba-b2a5-4a9c-857f-5a7337241dc3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.302551] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad28b821-59e8-4d08-96d3-3147fa7960b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.344442] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4da5d9-6898-4af4-9212-0eec59d28b2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.350216] env[61974]: INFO nova.compute.manager [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Took 35.05 seconds to build instance. [ 947.355115] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612ab66a-f118-4993-9ec3-87b6e7aeeb78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.369837] env[61974]: DEBUG nova.compute.provider_tree [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.414501] env[61974]: DEBUG oslo_vmware.api [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.444983} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.414834] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 947.414952] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 947.415166] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 947.415357] env[61974]: INFO nova.compute.manager [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Took 3.28 seconds to destroy the instance on the hypervisor. [ 947.415953] env[61974]: DEBUG oslo.service.loopingcall [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.415953] env[61974]: DEBUG nova.compute.manager [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 947.415953] env[61974]: DEBUG nova.network.neutron [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.439349] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a8fd2e9-4e60-4bf3-8042-af0462a9f846 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.600s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.473554] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379134, 'name': CreateVM_Task, 'duration_secs': 0.51792} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.473732] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.474434] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.474632] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.474995] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.475298] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ada881e8-d4ae-493f-9036-b9d587243258 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.480060] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 947.480060] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522eba8c-1802-faf0-74eb-6530de097ba9" [ 947.480060] env[61974]: _type = "Task" [ 947.480060] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.488973] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522eba8c-1802-faf0-74eb-6530de097ba9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.722975] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.734964] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.734964] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.735166] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.735439] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.736150] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.736150] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.736150] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.736150] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.736369] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 
tempest-ServerShowV247Test-184757694-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.739534] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.739534] env[61974]: DEBUG nova.virt.hardware [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.739534] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16ea404-4c66-41da-b344-a7b199339653 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.741010] env[61974]: DEBUG oslo_concurrency.lockutils [req-3f077825-7eb0-4274-87a7-998a1954893b req-ae12ce6e-2c4b-4063-8f8d-1e763f136f32 service nova] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.748648] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd9c934-f8fa-4ba3-a130-ac68154ef8b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.763610] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance VIF info [] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.769485] env[61974]: DEBUG oslo.service.loopingcall [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.771074] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.773430] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c31aaabf-c3e3-4ed4-9341-e61fc5d21083 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.785737] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "e6feee04-8aae-4151-8187-3ef4885bcf73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.785968] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.793632] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.793632] env[61974]: value = "task-1379137" [ 947.793632] env[61974]: _type = "Task" [ 947.793632] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.798536] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.798799] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.798991] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "7b338210-5be8-4838-b815-8f2c6cc19ccd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.799217] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.799471] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.803897] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379137, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.804268] env[61974]: INFO nova.compute.manager [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Terminating instance [ 947.806043] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "refresh_cache-7b338210-5be8-4838-b815-8f2c6cc19ccd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.806168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquired lock "refresh_cache-7b338210-5be8-4838-b815-8f2c6cc19ccd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.806332] env[61974]: DEBUG nova.network.neutron [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.852427] env[61974]: DEBUG oslo_concurrency.lockutils [None req-80cf65ac-7bbb-42a3-a11c-41940cd58f10 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.163s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.872993] env[61974]: DEBUG nova.scheduler.client.report [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 947.903308] env[61974]: DEBUG nova.network.neutron [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 
tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.941743] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 947.991748] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522eba8c-1802-faf0-74eb-6530de097ba9, 'name': SearchDatastore_Task, 'duration_secs': 0.047985} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.992391] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.992646] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.992890] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.993055] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.993247] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.993521] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95577837-8759-4ed6-ad43-3779fdcfca7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.004194] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.004194] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.004714] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d70aef5b-2614-4b9e-a07f-eeb36b529e92 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.010662] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 948.010662] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d5e7a0-3150-4f82-5cf9-9a772282754c" [ 948.010662] env[61974]: _type = "Task" [ 948.010662] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.018916] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d5e7a0-3150-4f82-5cf9-9a772282754c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.129563] env[61974]: DEBUG nova.network.neutron [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.303888] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379137, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.316895] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "097ad079-9712-4183-9135-b15ad3a65d6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.319348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.319348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.319348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.319348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.320213] env[61974]: INFO nova.compute.manager [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Terminating instance [ 948.321964] env[61974]: DEBUG nova.compute.manager [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 948.322201] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.323032] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1dabb5-9d61-4458-9c1b-feb924892cb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.327110] env[61974]: DEBUG nova.network.neutron [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.333510] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.333751] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-426f8f52-1a73-4886-9b20-615e5587efa8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.340126] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 948.340126] env[61974]: value = "task-1379138" [ 948.340126] env[61974]: _type = "Task" [ 948.340126] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.350362] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379138, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.354891] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 948.379755] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.379755] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 948.382159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.944s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.383568] env[61974]: INFO nova.compute.claims [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.398685] env[61974]: DEBUG nova.network.neutron [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.406097] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.406690] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Instance network_info: |[{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 948.406838] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.406944] env[61974]: DEBUG nova.network.neutron [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.408384] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:e8:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a342d02a-7577-428c-946f-e5725112ceec', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 948.416227] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Creating folder: Project (102785ae1c584cdb925a55afc3412fb9). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 948.417330] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15895dbb-e2bc-44dd-8d69-6ef730803b52 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.428908] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Created folder: Project (102785ae1c584cdb925a55afc3412fb9) in parent group-v292912. [ 948.428995] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Creating folder: Instances. Parent ref: group-v292980. 
{{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 948.429207] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e3e7210-b8bc-4f7a-ab3c-10a4042b99fd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.438714] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Created folder: Instances in parent group-v292980. [ 948.438965] env[61974]: DEBUG oslo.service.loopingcall [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.439731] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 948.439953] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ed1b8ad-8232-4292-9659-00e97f7dbbb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.466022] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 948.466022] env[61974]: value = "task-1379141" [ 948.466022] env[61974]: _type = "Task" [ 948.466022] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.474606] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379141, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.481980] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.523611] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d5e7a0-3150-4f82-5cf9-9a772282754c, 'name': SearchDatastore_Task, 'duration_secs': 0.008623} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.525161] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7e0282-d0c1-4c2f-9e48-1e45caa24f89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.534364] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 948.534364] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523bd133-ed51-44ae-611c-8ca7b336efb2" [ 948.534364] env[61974]: _type = "Task" [ 948.534364] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.542590] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523bd133-ed51-44ae-611c-8ca7b336efb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.633612] env[61974]: INFO nova.compute.manager [-] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Took 1.22 seconds to deallocate network for instance. [ 948.804928] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379137, 'name': CreateVM_Task, 'duration_secs': 0.548556} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.805247] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.805579] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.805751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.806105] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.806458] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d97f54e-80a4-41b3-83de-fdb4a54dfbfb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.811174] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb 
tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 948.811174] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529c396a-9e98-8982-8a15-8f9d66e0c6c4" [ 948.811174] env[61974]: _type = "Task" [ 948.811174] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.820879] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529c396a-9e98-8982-8a15-8f9d66e0c6c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.852409] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379138, 'name': PowerOffVM_Task, 'duration_secs': 0.253555} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.852409] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.852409] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.852409] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a42e7f3e-c83a-43f1-b96b-bd1bcb8cfa63 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.887882] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.889435] env[61974]: DEBUG nova.compute.utils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 948.893418] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 948.893595] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 948.903140] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Releasing lock "refresh_cache-7b338210-5be8-4838-b815-8f2c6cc19ccd" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.903140] env[61974]: DEBUG nova.compute.manager [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 948.903140] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.903895] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e662e6df-64d3-4c8b-98c9-caa2188c87c3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.912639] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.912909] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c3415b0-aa99-4189-a53c-e1183dde213d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.920663] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 948.920663] env[61974]: value = "task-1379143" [ 948.920663] env[61974]: _type = "Task" [ 948.920663] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.929991] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.956359] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.956359] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.956359] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Deleting the datastore file [datastore2] 097ad079-9712-4183-9135-b15ad3a65d6d {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.956359] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1c53a53-a2eb-49df-a82b-4c30249a843e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.962593] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for the task: (returnval){ [ 948.962593] env[61974]: value = "task-1379144" [ 948.962593] env[61974]: _type = "Task" [ 948.962593] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.973877] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.977430] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379141, 'name': CreateVM_Task, 'duration_secs': 0.337335} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.979015] env[61974]: DEBUG nova.policy [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1f9bf9b2b848c096b4aa88cf9035fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8153f21f085460db2d0328196e2f347', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 948.980549] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.981195] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.044896] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523bd133-ed51-44ae-611c-8ca7b336efb2, 'name': SearchDatastore_Task, 'duration_secs': 0.010857} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.045105] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.045376] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 59c72be0-46de-4cb8-93d6-0a2c70c90e2e/59c72be0-46de-4cb8-93d6-0a2c70c90e2e.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.045641] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e22cffb3-7ef5-4484-9a4b-037b43141e65 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.052514] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 949.052514] env[61974]: value = "task-1379145" [ 949.052514] env[61974]: _type = "Task" [ 949.052514] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.060240] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.183850] env[61974]: INFO nova.compute.manager [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Took 0.55 seconds to detach 1 volumes for instance. [ 949.296278] env[61974]: DEBUG nova.compute.manager [req-e9d88723-2437-494c-9f3b-df4783ff65b3 req-2f8bca7a-bcc7-4038-8d44-8706dec9aab8 service nova] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Received event network-vif-deleted-0e8f6fb0-8b0e-4320-b636-cd468b8bfda6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.324596] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529c396a-9e98-8982-8a15-8f9d66e0c6c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010072} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.324941] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.325216] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.325619] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.325715] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.325857] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.326260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.330022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.330022] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3556236-524e-4f12-b3fd-fdfd745d2060 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.330022] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-264287a1-3338-4315-93aa-3ea5844b8890 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.337503] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 949.337503] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d092e7-5313-52b0-e5f6-92a7ff00f72a" [ 949.337503] env[61974]: _type = "Task" [ 949.337503] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.345376] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.345592] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.351257] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51128ff9-de83-443d-b34e-4223cf86d0b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.352020] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d092e7-5313-52b0-e5f6-92a7ff00f72a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.355665] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 949.355665] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52428f54-295b-b67a-8976-1be3ea78514c" [ 949.355665] env[61974]: _type = "Task" [ 949.355665] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.364520] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52428f54-295b-b67a-8976-1be3ea78514c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.394858] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 949.439473] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379143, 'name': PowerOffVM_Task, 'duration_secs': 0.155289} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.443451] env[61974]: DEBUG nova.network.neutron [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 949.444105] env[61974]: DEBUG nova.network.neutron [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.446195] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.446453] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.450444] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6a68c37-e6ae-4867-85fd-b25505ec9046 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.481511] env[61974]: DEBUG oslo_vmware.api [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Task: {'id': task-1379144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172504} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.483827] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.484340] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.485431] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.485654] env[61974]: INFO nova.compute.manager [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 949.487310] env[61974]: DEBUG oslo.service.loopingcall [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.487310] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.487310] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.487310] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Deleting the datastore file [datastore2] 7b338210-5be8-4838-b815-8f2c6cc19ccd {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.487310] env[61974]: DEBUG nova.compute.manager [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 949.487568] env[61974]: DEBUG nova.network.neutron [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.489869] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e04b443-2728-4855-a927-371ce5ddb808 {{(pid=61974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.498949] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for the task: (returnval){ [ 949.498949] env[61974]: value = "task-1379147" [ 949.498949] env[61974]: _type = "Task" [ 949.498949] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.511152] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.563764] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379145, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.661196] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Successfully created port: 8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.695103] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.787016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa0b93-c91e-43f1-a52f-bad653908f92 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.794816] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1c4460-a93a-4735-80a2-b4091b44bb46 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.824714] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924e442b-24e0-46fc-9f35-f1b4d4f9e3d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.831925] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf98467-a6fa-4d16-b42b-356ce9de6fea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.848676] env[61974]: DEBUG nova.compute.provider_tree [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
949.854790] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d092e7-5313-52b0-e5f6-92a7ff00f72a, 'name': SearchDatastore_Task, 'duration_secs': 0.057402} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.856607] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.856607] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.856607] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.864956] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52428f54-295b-b67a-8976-1be3ea78514c, 'name': SearchDatastore_Task, 'duration_secs': 0.055026} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.865677] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bf020a9-8baf-4474-827e-b5c1260174cb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.870183] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 949.870183] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52fecaf5-6172-d2a3-42ac-b08e3a33324f" [ 949.870183] env[61974]: _type = "Task" [ 949.870183] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.877207] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fecaf5-6172-d2a3-42ac-b08e3a33324f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.954399] env[61974]: DEBUG oslo_concurrency.lockutils [req-eca4881d-5223-45f4-bac3-c180922b0fae req-a77761ac-213c-47f2-805e-f74d263e19f8 service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.008731] env[61974]: DEBUG oslo_vmware.api [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Task: {'id': task-1379147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212918} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.009091] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.009204] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.009389] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.009571] env[61974]: INFO nova.compute.manager [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 950.009821] env[61974]: DEBUG oslo.service.loopingcall [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.010031] env[61974]: DEBUG nova.compute.manager [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 950.010137] env[61974]: DEBUG nova.network.neutron [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.030106] env[61974]: DEBUG nova.network.neutron [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.063587] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379145, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555536} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.063934] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 59c72be0-46de-4cb8-93d6-0a2c70c90e2e/59c72be0-46de-4cb8-93d6-0a2c70c90e2e.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.064104] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.064372] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1873cfdd-c984-4035-a407-3ae1100ae49d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.071152] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 950.071152] env[61974]: value = "task-1379148" [ 950.071152] env[61974]: _type = "Task" [ 950.071152] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.078991] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.351801] env[61974]: DEBUG nova.scheduler.client.report [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.380528] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fecaf5-6172-d2a3-42ac-b08e3a33324f, 'name': SearchDatastore_Task, 'duration_secs': 0.042033} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.381683] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.381683] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 950.381683] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.381880] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.382017] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-328e24e6-0af9-4cfa-b9a9-14197102defb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.384141] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24d16224-7eae-4d1c-a1ed-5986f1c32daa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.392257] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 950.392257] env[61974]: value = "task-1379149" [ 950.392257] env[61974]: _type = "Task" [ 950.392257] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.401099] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.401318] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.402082] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379149, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.402427] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eef2419-a44e-49a4-b6f0-dbb3bb038cc5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.407270] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 950.410502] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 950.410502] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b773ed-f687-32ad-db73-c038dbdac549" [ 950.410502] env[61974]: _type = "Task" [ 950.410502] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.417666] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b773ed-f687-32ad-db73-c038dbdac549, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.433860] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 950.434143] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.434307] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.434497] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.434650] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.434846] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 950.435059] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 950.435288] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 950.435473] env[61974]: DEBUG 
nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 950.435646] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 950.435827] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 950.436669] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ecce4c-c101-471c-b99f-c8f6da7e767a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.445804] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9ee744-4125-4b40-bf2f-a73b03b4944f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.533635] env[61974]: DEBUG nova.network.neutron [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.580447] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078665} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.580753] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.581678] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ba8c59-46bf-476a-8451-8652d387be8a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.603538] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 59c72be0-46de-4cb8-93d6-0a2c70c90e2e/59c72be0-46de-4cb8-93d6-0a2c70c90e2e.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.603827] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3104ba7b-a339-4936-95f6-70454be6fbf0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.624396] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 950.624396] env[61974]: value = "task-1379150" [ 950.624396] env[61974]: _type = "Task" [ 950.624396] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.636084] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379150, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.663659] env[61974]: DEBUG nova.network.neutron [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.856489] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.857026] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.859980] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.501s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.861885] env[61974]: INFO nova.compute.claims [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.902733] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379149, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.920142] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b773ed-f687-32ad-db73-c038dbdac549, 'name': SearchDatastore_Task, 'duration_secs': 0.029538} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.920890] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b51483d-4860-452d-9508-29e577bde3a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.925625] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 950.925625] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523fb637-956a-6584-619f-ee4fae051f95" [ 950.925625] env[61974]: _type = "Task" [ 950.925625] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.932738] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523fb637-956a-6584-619f-ee4fae051f95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.036583] env[61974]: INFO nova.compute.manager [-] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Took 1.03 seconds to deallocate network for instance. [ 951.136569] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379150, 'name': ReconfigVM_Task, 'duration_secs': 0.271057} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.136914] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 59c72be0-46de-4cb8-93d6-0a2c70c90e2e/59c72be0-46de-4cb8-93d6-0a2c70c90e2e.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.137687] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-654e5fe3-5171-4ba2-a5ee-397aa52c1a98 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.144647] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 951.144647] env[61974]: value = "task-1379151" [ 951.144647] env[61974]: _type = "Task" [ 951.144647] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.156773] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379151, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.166559] env[61974]: INFO nova.compute.manager [-] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Took 1.68 seconds to deallocate network for instance. 
[ 951.334425] env[61974]: DEBUG nova.compute.manager [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-vif-deleted-ce99556d-de01-4549-9f98-e0e52d4a0b16 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.334805] env[61974]: DEBUG nova.compute.manager [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Received event network-vif-deleted-27cf697b-5e9a-4214-907e-4bd03824c8fa {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.335040] env[61974]: DEBUG nova.compute.manager [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Received event network-vif-plugged-8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.336999] env[61974]: DEBUG oslo_concurrency.lockutils [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] Acquiring lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.336999] env[61974]: DEBUG oslo_concurrency.lockutils [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.336999] env[61974]: DEBUG oslo_concurrency.lockutils [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.336999] env[61974]: DEBUG nova.compute.manager [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] No waiting events found dispatching network-vif-plugged-8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 951.336999] env[61974]: WARNING nova.compute.manager [req-f49592fc-d405-4286-b6d6-7035f6e2a35b req-ded20cb4-8f40-4108-85bd-0c3cc00ed2b9 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Received unexpected event network-vif-plugged-8004aa93-735a-4494-97c5-cdc9e33eedb9 for instance with vm_state building and task_state spawning. 
[ 951.366232] env[61974]: DEBUG nova.compute.utils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 951.370432] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 951.370432] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.410316] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379149, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.444663] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523fb637-956a-6584-619f-ee4fae051f95, 'name': SearchDatastore_Task, 'duration_secs': 0.046625} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.444971] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.445258] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] ceb0dd02-6441-4923-99f6-73f8eab86fe5/ceb0dd02-6441-4923-99f6-73f8eab86fe5.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.445541] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7396fae-307b-4fb6-96df-56c51cf8f0a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.455571] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 951.455571] env[61974]: value = "task-1379152" [ 951.455571] env[61974]: _type = "Task" [ 951.455571] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.462307] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.546939] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.632952] env[61974]: DEBUG nova.policy [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1f9bf9b2b848c096b4aa88cf9035fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8153f21f085460db2d0328196e2f347', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 951.654925] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379151, 'name': Rename_Task, 'duration_secs': 0.16352} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.655230] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.655482] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90969be2-0f22-4693-ae26-419a68a74577 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.662731] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 951.662731] env[61974]: value = "task-1379153" [ 951.662731] env[61974]: _type = "Task" [ 951.662731] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.668484] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Successfully updated port: 8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 951.673723] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.674291] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.876407] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 951.903924] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379149, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.1251} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.904464] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.904856] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.905261] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-931a071b-a2a9-4287-a0eb-1b7b986091d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.914798] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 951.914798] env[61974]: value = "task-1379154" [ 951.914798] env[61974]: _type = "Task" [ 951.914798] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.925154] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379154, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.968229] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379152, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.987494] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Successfully created port: c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.168772] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.168937] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.169107] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.180193] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379153, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.213341] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817d913b-edea-4302-9d7f-668a8faa5901 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.223011] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e18c92-6fcb-4c96-9770-9a8033e19898 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.258606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc85d59-47ec-45d9-9e21-49797d481073 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.269179] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64150157-780c-40d5-b03a-a9a034d731dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.285549] env[61974]: DEBUG nova.compute.provider_tree [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.425959] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068351} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.426274] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.427046] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12e843e-ea30-4d50-bce3-393d5c8bc26f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.445828] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.446292] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42103956-dad9-4d6e-a0c3-c8f9cf53fa00 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.468751] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379152, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.469730] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 952.469730] env[61974]: value = "task-1379155" [ 952.469730] env[61974]: _type = "Task" [ 952.469730] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.476387] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379155, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.674896] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379153, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.727288] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 952.788966] env[61974]: DEBUG nova.scheduler.client.report [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.889553] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 952.916804] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 952.917472] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.917472] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.917472] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.917675] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.917722] env[61974]: DEBUG 
nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 952.917906] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 952.918081] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 952.918307] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 952.918525] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 952.918715] env[61974]: DEBUG nova.virt.hardware [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 952.919639] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab3204f-dbce-4698-8515-76f8c2f2249c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.927624] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c261886-0603-440d-90e2-c494465fb8d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.934039] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Updating instance_info_cache with network_info: [{"id": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "address": "fa:16:3e:c7:a1:11", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8004aa93-73", "ovs_interfaceid": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.969871] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379152, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.072752} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.970266] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] ceb0dd02-6441-4923-99f6-73f8eab86fe5/ceb0dd02-6441-4923-99f6-73f8eab86fe5.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 952.970481] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 952.973536] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1ff2deb-82fe-486d-9922-e2acbc848dd5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.980643] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379155, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.981923] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 952.981923] env[61974]: value = "task-1379156" [ 952.981923] env[61974]: _type = "Task" [ 952.981923] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.989629] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379156, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.172922] env[61974]: DEBUG oslo_vmware.api [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379153, 'name': PowerOnVM_Task, 'duration_secs': 1.488364} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.173206] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.173420] env[61974]: INFO nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Took 9.59 seconds to spawn the instance on the hypervisor. [ 953.173604] env[61974]: DEBUG nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.174370] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e12e60f-07c1-456f-a84e-11d91bc3ad09 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.295310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.295761] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 953.298457] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.495s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.299851] env[61974]: INFO nova.compute.claims [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.358567] env[61974]: DEBUG nova.compute.manager [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Received event network-changed-8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.358852] env[61974]: DEBUG nova.compute.manager [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Refreshing instance network info cache due to event network-changed-8004aa93-735a-4494-97c5-cdc9e33eedb9. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.359160] env[61974]: DEBUG oslo_concurrency.lockutils [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] Acquiring lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.414943] env[61974]: DEBUG nova.compute.manager [req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Received event network-vif-plugged-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.415195] env[61974]: DEBUG oslo_concurrency.lockutils [req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] Acquiring lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.415416] env[61974]: DEBUG oslo_concurrency.lockutils [req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.415598] env[61974]: DEBUG oslo_concurrency.lockutils [req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.415775] env[61974]: DEBUG nova.compute.manager 
[req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] No waiting events found dispatching network-vif-plugged-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 953.415947] env[61974]: WARNING nova.compute.manager [req-a48e206f-b4de-4284-b92d-4f7d68895864 req-1610a436-2370-43bc-a5c2-e789785338e4 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Received unexpected event network-vif-plugged-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 for instance with vm_state building and task_state spawning. [ 953.436820] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.437142] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Instance network_info: |[{"id": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "address": "fa:16:3e:c7:a1:11", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8004aa93-73", "ovs_interfaceid": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 953.437562] env[61974]: DEBUG oslo_concurrency.lockutils [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] Acquired lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.437775] env[61974]: DEBUG nova.network.neutron [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Refreshing network info cache for port 8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.439032] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:a1:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8004aa93-735a-4494-97c5-cdc9e33eedb9', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.447989] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Creating folder: Project (e8153f21f085460db2d0328196e2f347). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.451641] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feaf9908-ab33-4583-afce-c5f1511b8dc4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.462050] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Created folder: Project (e8153f21f085460db2d0328196e2f347) in parent group-v292912. [ 953.462257] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Creating folder: Instances. Parent ref: group-v292983. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 953.462504] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0b8e1fd-f6da-4383-8661-a4fd5d9d2e09 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.474919] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Created folder: Instances in parent group-v292983. [ 953.475215] env[61974]: DEBUG oslo.service.loopingcall [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.475724] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.475940] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba24c742-3465-4612-a9d7-c8662ac56994 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.500592] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379155, 'name': ReconfigVM_Task, 'duration_secs': 0.535797} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.505166] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 45fda940-b7f0-410c-b31a-b5cd365c28fe/45fda940-b7f0-410c-b31a-b5cd365c28fe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.505826] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8f41f9c-51e4-4fd6-bbae-eaac0ca597f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.509296] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.509296] env[61974]: value = "task-1379159" [ 953.509296] env[61974]: _type = "Task" [ 953.509296] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.512513] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379156, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065314} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.518695] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.519039] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 953.519039] env[61974]: value = "task-1379160" [ 953.519039] env[61974]: _type = "Task" [ 953.519039] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.519737] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d304f842-7822-4b1d-b666-e7e429ca32e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.528392] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379159, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.548457] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] ceb0dd02-6441-4923-99f6-73f8eab86fe5/ceb0dd02-6441-4923-99f6-73f8eab86fe5.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.552633] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe04a0ed-0b3f-4f84-b315-62acbca2df43 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.567450] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379160, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.576611] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 953.576611] env[61974]: value = "task-1379161" [ 953.576611] env[61974]: _type = "Task" [ 953.576611] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.585167] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379161, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.692337] env[61974]: INFO nova.compute.manager [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Took 24.47 seconds to build instance. [ 953.739031] env[61974]: DEBUG nova.network.neutron [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Updated VIF entry in instance network info cache for port 8004aa93-735a-4494-97c5-cdc9e33eedb9. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 953.739416] env[61974]: DEBUG nova.network.neutron [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Updating instance_info_cache with network_info: [{"id": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "address": "fa:16:3e:c7:a1:11", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8004aa93-73", "ovs_interfaceid": "8004aa93-735a-4494-97c5-cdc9e33eedb9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.803813] env[61974]: DEBUG nova.compute.utils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.807084] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 953.807259] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.852290] env[61974]: DEBUG nova.policy [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 954.022555] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379159, 'name': CreateVM_Task, 'duration_secs': 0.365467} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.026870] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.027635] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.027808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.028146] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 954.028861] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-347d0d59-8675-4980-99c3-43d8c72de898 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.034810] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379160, 'name': Rename_Task, 'duration_secs': 0.21323} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.035446] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.035719] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67b79b20-e455-403c-adf7-910b6efafe88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.038981] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 954.038981] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ae9e-e151-5116-7019-266e3423d5f5" [ 954.038981] env[61974]: _type = "Task" [ 954.038981] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.044280] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 954.044280] env[61974]: value = "task-1379162" [ 954.044280] env[61974]: _type = "Task" [ 954.044280] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.050521] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ae9e-e151-5116-7019-266e3423d5f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.055786] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.089121] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379161, 'name': ReconfigVM_Task, 'duration_secs': 0.364034} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.089411] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfigured VM instance instance-0000004c to attach disk [datastore1] ceb0dd02-6441-4923-99f6-73f8eab86fe5/ceb0dd02-6441-4923-99f6-73f8eab86fe5.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.090064] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e87ca37-e14d-46c5-ade0-fb1ad50e2734 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.096558] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 954.096558] env[61974]: value = "task-1379163" [ 954.096558] env[61974]: _type = "Task" [ 954.096558] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.106762] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379163, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.121378] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Successfully updated port: c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.194168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0101f6dc-5751-47b2-8054-a4b4a323c79a tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.492s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.243086] env[61974]: DEBUG oslo_concurrency.lockutils [req-56f88e60-99a5-44ff-b83e-5248ce5b41a4 req-a377b2ac-449b-47e1-b204-33de09706dcb service nova] Releasing lock "refresh_cache-eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.279694] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Successfully created port: 1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.307929] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 954.553286] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ae9e-e151-5116-7019-266e3423d5f5, 'name': SearchDatastore_Task, 'duration_secs': 0.020263} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.554158] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.554412] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.554670] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.554827] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.555120] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.555449] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb366729-8678-4e26-8f84-20682c4571d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.559972] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379162, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.566109] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.566260] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.566957] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d1925cc-214c-4d84-a62b-df5e3cb0a89b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.571523] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 954.571523] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523ce9eb-c51a-eb3d-46ed-dcf2198bdfc6" [ 954.571523] env[61974]: _type = "Task" [ 954.571523] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.581643] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523ce9eb-c51a-eb3d-46ed-dcf2198bdfc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.607121] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379163, 'name': Rename_Task, 'duration_secs': 0.15213} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.607354] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 954.607596] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2480b296-f959-47f6-a38b-aaa3cd557391 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.612075] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b67b70-11d7-464a-a498-cb3225b7210e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.615384] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 954.615384] env[61974]: value = "task-1379164" [ 954.615384] env[61974]: _type = "Task" [ 954.615384] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.623700] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309a6bcd-d0ab-4607-8c4c-cee387380f09 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.626693] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.627204] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.627336] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.627497] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.660722] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bde625-e4dd-44dc-91ed-5da269616159 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.670395] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2b5f92-76b6-4241-96eb-91753b8d82d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.675983] env[61974]: DEBUG nova.compute.manager [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.676396] env[61974]: DEBUG nova.compute.manager [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing instance network info cache due to event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 954.677103] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.677103] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.677103] env[61974]: DEBUG nova.network.neutron [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.692294] env[61974]: DEBUG nova.compute.provider_tree [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.697091] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 955.057531] env[61974]: DEBUG oslo_vmware.api [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379162, 'name': PowerOnVM_Task, 'duration_secs': 0.780217} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.059124] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.059124] env[61974]: DEBUG nova.compute.manager [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.059476] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45073f0-325b-4244-aa24-2d2caa7f1ba4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.084555] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523ce9eb-c51a-eb3d-46ed-dcf2198bdfc6, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.086237] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cf26df9-c5d6-4876-bdd0-313ebedec65d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.092516] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 955.092516] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52afa85e-4676-a2fa-d20e-9c1036997b02" [ 955.092516] env[61974]: _type = "Task" [ 955.092516] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.101132] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afa85e-4676-a2fa-d20e-9c1036997b02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.125673] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379164, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.161456] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.195623] env[61974]: DEBUG nova.scheduler.client.report [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.219505] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.321413] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 955.360538] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 955.361082] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.363017] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.363378] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.363643] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.363842] env[61974]: DEBUG nova.virt.hardware [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.365250] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264619d2-2834-436a-8ae0-370e6042f518 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.377952] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0223726-1f4b-4b16-a9e6-1cec4f79d100 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.385597] env[61974]: DEBUG nova.network.neutron [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Updating instance_info_cache with network_info: [{"id": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "address": "fa:16:3e:45:b1:31", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1223bcf-9d", "ovs_interfaceid": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.493870] env[61974]: DEBUG nova.compute.manager [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Received event network-changed-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.494085] env[61974]: DEBUG nova.compute.manager [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Refreshing instance network info cache due to event network-changed-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 955.494710] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] Acquiring lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.555514] env[61974]: DEBUG nova.network.neutron [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updated VIF entry in instance network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.555903] env[61974]: DEBUG nova.network.neutron [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.579345] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.605031] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afa85e-4676-a2fa-d20e-9c1036997b02, 'name': SearchDatastore_Task, 'duration_secs': 0.01975} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.606027] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.606027] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] eb6dfd21-0ba6-455c-b14e-80dacaf6b92c/eb6dfd21-0ba6-455c-b14e-80dacaf6b92c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.607040] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-995d06fb-c90c-4c9c-a119-59cf4a00e279 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.614894] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 955.614894] env[61974]: value = "task-1379165" [ 955.614894] env[61974]: _type = "Task" [ 955.614894] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.631720] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.634960] env[61974]: DEBUG oslo_vmware.api [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379164, 'name': PowerOnVM_Task, 'duration_secs': 0.528306} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.635811] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.636175] env[61974]: INFO nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Took 9.57 seconds to spawn the instance on the hypervisor. 
[ 955.636308] env[61974]: DEBUG nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 955.637434] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2118fe24-f356-4596-b2f7-7649bc209dbd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.701614] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.702408] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.705359] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.821s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.705629] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.705841] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 955.706271] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.850s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.706613] env[61974]: DEBUG nova.objects.instance [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 955.711014] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c2e2dd-2865-4ab5-9d60-ecab07535149 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.720647] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238b7951-c5d1-4b27-8055-b7e6d49bc76c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.735277] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553f6af8-0eab-412e-b896-228ba1eb5fe4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.742900] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ff24be-c40c-401c-bac2-d7d53b163afc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.777563] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181354MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 955.777720] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.870149] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Successfully updated port: 1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.888828] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.889176] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Instance network_info: |[{"id": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "address": "fa:16:3e:45:b1:31", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapc1223bcf-9d", "ovs_interfaceid": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 955.889548] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] Acquired lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.889738] env[61974]: DEBUG nova.network.neutron [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Refreshing network info cache for port c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.891203] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:b1:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1223bcf-9d3f-4e7a-8dee-1bdfb774e108', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.899016] env[61974]: DEBUG oslo.service.loopingcall [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.902200] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.902753] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3d4e9c5-3404-48f7-b5de-44b7bc90aadf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.924319] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.924319] env[61974]: value = "task-1379166" [ 955.924319] env[61974]: _type = "Task" [ 955.924319] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.935047] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379166, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.058772] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3fe457c-206e-4876-8669-25a7a9c40c73 req-f5c57774-aa01-4b18-9d58-f98cb004ff86 service nova] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.128673] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.129074] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.129282] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "45fda940-b7f0-410c-b31a-b5cd365c28fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.129581] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.129727] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.131434] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379165, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.131969] env[61974]: INFO nova.compute.manager [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Terminating instance [ 956.133646] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "refresh_cache-45fda940-b7f0-410c-b31a-b5cd365c28fe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.133809] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "refresh_cache-45fda940-b7f0-410c-b31a-b5cd365c28fe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.133980] env[61974]: DEBUG nova.network.neutron [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.157877] env[61974]: INFO nova.compute.manager [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Took 25.89 seconds to build instance. [ 956.192221] env[61974]: DEBUG nova.network.neutron [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Updated VIF entry in instance network info cache for port c1223bcf-9d3f-4e7a-8dee-1bdfb774e108. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.192593] env[61974]: DEBUG nova.network.neutron [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Updating instance_info_cache with network_info: [{"id": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "address": "fa:16:3e:45:b1:31", "network": {"id": "424f49c1-4580-4987-b137-318add429df8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1883911470-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8153f21f085460db2d0328196e2f347", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1223bcf-9d", "ovs_interfaceid": "c1223bcf-9d3f-4e7a-8dee-1bdfb774e108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.211787] env[61974]: DEBUG nova.compute.utils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.213079] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 956.213245] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.252276] env[61974]: DEBUG nova.policy [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92306aad15774bb19f9ad1766e4049aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38bb1d7cc5574657a98eaefb81321006', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 956.373268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.373457] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.373653] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.433421] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379166, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.548318] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Successfully created port: 23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.627081] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532167} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.627081] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] eb6dfd21-0ba6-455c-b14e-80dacaf6b92c/eb6dfd21-0ba6-455c-b14e-80dacaf6b92c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.627081] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.627314] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a0690e2-242c-4036-9d80-a9179efc0230 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.644030] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 956.644030] env[61974]: value = "task-1379167" [ 956.644030] env[61974]: _type = "Task" [ 956.644030] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.652253] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.653050] env[61974]: DEBUG nova.network.neutron [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.659393] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8762aec5-cd1d-46da-8310-449b5e140157 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.366s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.694722] env[61974]: DEBUG oslo_concurrency.lockutils [req-cd62cf33-938d-4421-974a-ef6622306110 req-4e3ab561-8d8d-4c63-a82a-1904990b3eaf service nova] Releasing lock "refresh_cache-a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.716720] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.720289] env[61974]: DEBUG oslo_concurrency.lockutils [None req-42594704-13fb-49a2-b59a-994b1f9b43b6 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.721300] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.591s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.722690] env[61974]: INFO nova.compute.claims [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.748235] env[61974]: DEBUG nova.network.neutron [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.930395] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.938321] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379166, 'name': CreateVM_Task, 'duration_secs': 0.568167} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.938546] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.939284] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.939560] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.939920] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.940227] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73f6575c-917e-4fb4-928d-c3f2d5201ae6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.947105] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 956.947105] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528a6d86-d024-e727-1219-fc04faa90801" [ 956.947105] env[61974]: _type = "Task" [ 956.947105] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.955942] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528a6d86-d024-e727-1219-fc04faa90801, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.154532] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.306406} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.154861] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.155670] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d77412-fa71-4c6d-b7c8-870f7d4f43c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.186749] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] eb6dfd21-0ba6-455c-b14e-80dacaf6b92c/eb6dfd21-0ba6-455c-b14e-80dacaf6b92c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.187102] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b546d1-60e5-4d4c-8b8a-dd032bc0f83b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.214105] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 957.214105] env[61974]: value = "task-1379168" [ 957.214105] env[61974]: _type = "Task" [ 957.214105] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.223283] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379168, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.250494] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "refresh_cache-45fda940-b7f0-410c-b31a-b5cd365c28fe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.251011] env[61974]: DEBUG nova.compute.manager [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 957.251510] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.252103] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a3c426-ce14-4b6f-a682-369a71fa616e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.261572] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.261829] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0027e296-a57d-4a6b-984e-bad048e2221f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.269453] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 957.269453] env[61974]: value = "task-1379169" [ 957.269453] env[61974]: _type = "Task" [ 957.269453] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.277164] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379169, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.367312] env[61974]: DEBUG nova.network.neutron [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Updating instance_info_cache with network_info: [{"id": "1b95be55-2118-4629-b9c8-0063c54f9e94", "address": "fa:16:3e:d0:50:69", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b95be55-21", "ovs_interfaceid": "1b95be55-2118-4629-b9c8-0063c54f9e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.457301] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528a6d86-d024-e727-1219-fc04faa90801, 'name': SearchDatastore_Task, 'duration_secs': 0.009238} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.457704] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.457962] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.458208] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.458418] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.458613] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.458889] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ad7abde-5ef3-482c-bc4a-970d5ccf3cbc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.467094] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.467269] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.468000] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2dfaee3-0a4f-44a3-aa36-55bfdf8cdca2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.473469] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 957.473469] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52dde069-50e5-0955-3454-ceeab93c31bb" [ 957.473469] env[61974]: _type = "Task" [ 957.473469] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.481586] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dde069-50e5-0955-3454-ceeab93c31bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.522070] env[61974]: DEBUG nova.compute.manager [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.522295] env[61974]: DEBUG nova.compute.manager [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 957.522513] env[61974]: DEBUG oslo_concurrency.lockutils [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.522663] env[61974]: DEBUG oslo_concurrency.lockutils [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.522825] env[61974]: DEBUG nova.network.neutron [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.632841] env[61974]: DEBUG nova.compute.manager [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Received event network-vif-plugged-1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.635520] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Acquiring lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.635772] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.635955] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.636165] env[61974]: DEBUG nova.compute.manager [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] No waiting events found dispatching network-vif-plugged-1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 957.636348] env[61974]: WARNING nova.compute.manager [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Received unexpected event network-vif-plugged-1b95be55-2118-4629-b9c8-0063c54f9e94 for instance with vm_state building and task_state spawning. 
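The two request threads above (req-0b22674e-… and req-2b7ef901-…) trace how external Neutron events are consumed by nova-compute: a per-instance "<uuid>-events" lock is taken, the event is popped from whatever a spawning thread registered to wait for, an event nobody is waiting for is logged as unexpected (the instance is still in vm_state building / task_state spawning), and a network-changed event triggers a refresh of the instance network info cache. The following is a minimal sketch of that dispatch pattern under assumed names; it is not Nova's actual implementation.

# Simplified, assumed-name reconstruction of the per-instance event dispatch
# visible in the log lines above; real Nova code differs in detail.
import threading
from collections import defaultdict

class InstanceEventRegistry:
    def __init__(self):
        self._events = defaultdict(dict)           # instance_uuid -> {event_name: threading.Event}
        self._locks = defaultdict(threading.Lock)  # one lock per instance, like "<uuid>-events"

    def prepare(self, instance_uuid, event_name):
        """Called by the spawning thread before it starts waiting on an event."""
        with self._locks[instance_uuid]:
            waiter = threading.Event()
            self._events[instance_uuid][event_name] = waiter
            return waiter

    def pop(self, instance_uuid, event_name):
        """Called when an external event arrives; returns the waiter or None."""
        with self._locks[instance_uuid]:
            return self._events[instance_uuid].pop(event_name, None)

def handle_external_event(registry, instance, event_name, refresh_cache):
    waiter = registry.pop(instance["uuid"], event_name)
    if waiter is None:
        # Corresponds to the WARNING above: no waiting events found dispatching
        # this event for an instance that is still building/spawning.
        print(f"WARNING: unexpected event {event_name} for instance "
              f"in vm_state {instance['vm_state']}")
    else:
        waiter.set()
    if event_name.startswith("network-changed"):
        refresh_cache(instance["uuid"])  # refresh the instance network info cache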
[ 957.636516] env[61974]: DEBUG nova.compute.manager [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Received event network-changed-1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.636678] env[61974]: DEBUG nova.compute.manager [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Refreshing instance network info cache due to event network-changed-1b95be55-2118-4629-b9c8-0063c54f9e94. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 957.636847] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Acquiring lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.724921] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379168, 'name': ReconfigVM_Task, 'duration_secs': 0.33341} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.725671] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Reconfigured VM instance instance-0000004d to attach disk [datastore1] eb6dfd21-0ba6-455c-b14e-80dacaf6b92c/eb6dfd21-0ba6-455c-b14e-80dacaf6b92c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.726404] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c3f76a6-6b70-48fe-8005-63df3ae2ff19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.733560] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 957.733560] env[61974]: value = "task-1379170" [ 957.733560] env[61974]: _type = "Task" [ 957.733560] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.740934] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.748726] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379170, 'name': Rename_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.764790] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.765109] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.765277] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.765466] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.765618] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.765769] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.765978] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.766170] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 957.766371] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.766549] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.766729] env[61974]: DEBUG nova.virt.hardware [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.767934] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e01066-59de-4a20-9e43-72f133a30d0c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.783490] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0525ab-083c-45ae-9035-3dd18f54ee3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.787226] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379169, 'name': PowerOffVM_Task, 'duration_secs': 0.183247} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.789674] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.789850] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.790849] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57846e5a-ddd2-4377-bcf1-8f9c30ec0873 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.827436] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.827691] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.827834] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleting the datastore file [datastore1] 45fda940-b7f0-410c-b31a-b5cd365c28fe {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.828689] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da14dd92-a8a3-43f1-847b-7e68cf108a3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.836998] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 957.836998] env[61974]: value = "task-1379172" [ 957.836998] env[61974]: _type = "Task" [ 957.836998] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.845068] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.870184] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.870569] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Instance network_info: |[{"id": "1b95be55-2118-4629-b9c8-0063c54f9e94", "address": "fa:16:3e:d0:50:69", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b95be55-21", "ovs_interfaceid": "1b95be55-2118-4629-b9c8-0063c54f9e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 957.870961] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Acquired lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.871224] env[61974]: DEBUG nova.network.neutron [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Refreshing network info cache for port 1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.872631] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:50:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b95be55-2118-4629-b9c8-0063c54f9e94', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.880654] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating folder: Project 
(4757d24b61794cfcaefff2ad44e02b74). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 957.885035] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a107ea38-ae46-4b35-8529-c0e75b6da504 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.898196] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created folder: Project (4757d24b61794cfcaefff2ad44e02b74) in parent group-v292912. [ 957.898196] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating folder: Instances. Parent ref: group-v292987. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 957.898196] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccfd6f22-9bf5-42c0-b5b9-77bf1a3a26b4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.911884] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created folder: Instances in parent group-v292987. [ 957.911884] env[61974]: DEBUG oslo.service.loopingcall [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.911884] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.912844] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-019e5c81-a5cf-46b1-969e-105b95fc4948 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.934099] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.934099] env[61974]: value = "task-1379175" [ 957.934099] env[61974]: _type = "Task" [ 957.934099] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.941432] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379175, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.984185] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dde069-50e5-0955-3454-ceeab93c31bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008986} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.987283] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a69788cf-7ff8-409d-87f2-99d82d4a1352 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.992929] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 957.992929] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e832a4-7fd0-1e13-172b-5c1649a8d698" [ 957.992929] env[61974]: _type = "Task" [ 957.992929] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.003010] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e832a4-7fd0-1e13-172b-5c1649a8d698, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.101263] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2302cd-2caf-484e-af35-3affc37c6df4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.106768] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c780433d-2522-4973-8491-4b26487e68d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.141251] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Successfully updated port: 23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.143273] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3785e2-b205-40f1-95be-62bd3a400717 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.155892] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de89117-e86d-40b0-9c36-ce8646966df5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.177759] env[61974]: DEBUG nova.compute.provider_tree [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.243509] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379170, 'name': Rename_Task, 'duration_secs': 0.174478} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.243933] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.244319] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9294a0c-cd22-4ff1-9b01-f2f2ce862fb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.250557] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 958.250557] env[61974]: value = "task-1379176" [ 958.250557] env[61974]: _type = "Task" [ 958.250557] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.261015] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.289189] env[61974]: DEBUG nova.network.neutron [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.289616] env[61974]: DEBUG nova.network.neutron [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.346851] env[61974]: DEBUG oslo_vmware.api [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113497} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.347114] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.347319] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.347501] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.347684] env[61974]: INFO nova.compute.manager [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Took 1.10 seconds to destroy the instance on the hypervisor. 
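Most entries in this stretch follow the same wait_for_task / _poll_task rhythm from oslo_vmware.api: a vCenter task reference comes back (SearchDatastore_Task, ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, …), the API layer polls its TaskInfo, logs "progress is N%" while the task is queued or running, and logs "completed successfully" once it reaches the success state. The loop below is a self-contained sketch of that pattern; fetch_task_info is a stand-in for the PropertyCollector read of TaskInfo, not an actual oslo_vmware call.

# Minimal polling loop reproducing the "progress is N%." / "completed
# successfully." lines above; the real version lives in oslo_vmware.api.
import time

def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5):
    while True:
        info = fetch_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 66}
        state = info["state"]
        if state in ("queued", "running"):
            print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)
            continue
        if state == "success":
            print(f"Task: {task_ref} completed successfully.")
            return info.get("result")
        # Any other state ('error') surfaces the fault to the caller.
        raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")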
[ 958.348305] env[61974]: DEBUG oslo.service.loopingcall [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.348554] env[61974]: DEBUG nova.compute.manager [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.348678] env[61974]: DEBUG nova.network.neutron [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.365159] env[61974]: DEBUG nova.network.neutron [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.394902] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.395205] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.443223] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379175, 'name': CreateVM_Task, 'duration_secs': 0.357782} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.443416] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.444103] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.444192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.444538] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.444804] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67fae0e-3411-4120-94da-3c859ae6779b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.452195] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 958.452195] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52626db9-94cc-143c-1775-d0bd03061808" [ 958.452195] env[61974]: _type = "Task" [ 958.452195] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.462544] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52626db9-94cc-143c-1775-d0bd03061808, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.501769] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e832a4-7fd0-1e13-172b-5c1649a8d698, 'name': SearchDatastore_Task, 'duration_secs': 0.013825} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.502047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.502321] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec/a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.502578] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1546a674-707f-45a6-b8d1-3fc2c9b5a123 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.507998] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 958.507998] env[61974]: value = "task-1379177" [ 958.507998] env[61974]: _type = "Task" [ 958.507998] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.517995] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379177, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.617448] env[61974]: DEBUG nova.network.neutron [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Updated VIF entry in instance network info cache for port 1b95be55-2118-4629-b9c8-0063c54f9e94. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.617846] env[61974]: DEBUG nova.network.neutron [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Updating instance_info_cache with network_info: [{"id": "1b95be55-2118-4629-b9c8-0063c54f9e94", "address": "fa:16:3e:d0:50:69", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b95be55-21", "ovs_interfaceid": "1b95be55-2118-4629-b9c8-0063c54f9e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.649941] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.650073] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.650316] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.681075] env[61974]: DEBUG nova.scheduler.client.report [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 958.761325] env[61974]: DEBUG 
oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379176, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.792761] env[61974]: DEBUG oslo_concurrency.lockutils [req-0b22674e-661e-492a-89ff-0c37646a59d4 req-24064610-9f88-4f82-9ebd-53a6dfca10b5 service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.867835] env[61974]: DEBUG nova.network.neutron [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.899011] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 958.966128] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52626db9-94cc-143c-1775-d0bd03061808, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.966514] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.966821] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.967128] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.967307] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.967489] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.968094] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a727c6a0-cbab-4bf4-99cd-b5841a773fff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.980031] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.980412] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.981057] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd2ca29e-880e-4006-9cd8-2c0fa56e8a00 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.987462] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 958.987462] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]525667d3-bf28-ab06-c3f5-7be65d0af4bf" [ 958.987462] env[61974]: _type = "Task" [ 958.987462] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.997265] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525667d3-bf28-ab06-c3f5-7be65d0af4bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.017864] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48678} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.017967] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec/a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.018223] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.018559] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35ab1b72-9a71-4cc8-bb66-c033cf862aca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.024987] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 959.024987] env[61974]: value = "task-1379178" [ 959.024987] env[61974]: _type = "Task" [ 959.024987] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.032306] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379178, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.120874] env[61974]: DEBUG oslo_concurrency.lockutils [req-2b7ef901-b96f-4f0a-aeeb-697d99a87dba req-400a7026-7455-479c-b5bf-9dfe3642b713 service nova] Releasing lock "refresh_cache-c06a7599-58e8-4796-9e95-d96327f649d0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.181558] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.186590] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.187099] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 959.189963] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.086s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.190208] env[61974]: DEBUG nova.objects.instance [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'resources' on Instance uuid f0601d26-4e29-4946-bb52-50e2a2163535 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.262600] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379176, 'name': PowerOnVM_Task, 'duration_secs': 0.860546} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.262862] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.263082] env[61974]: INFO nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Took 8.86 seconds to spawn the instance on the hypervisor. 
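The spawn of a6cc4d1d-… above shows the datastore image-cache path end to end: a lock is taken on [datastore1] devstack-image-cache_base/2c021a64-…, a SearchDatastore_Task checks whether the cached VMDK already exists, CopyVirtualDisk_Task clones it into the instance folder, ExtendVirtualDisk_Task grows it to the flavor root size (1048576 here, consistent with the 1 GB root of m1.nano expressed in KB), and the disk is then attached, renamed and powered on. Below is a hedged sketch of that cache-then-copy flow; every helper passed in (vmdk_exists, fetch_image, copy_virtual_disk, extend_virtual_disk) is an assumed stand-in for the corresponding vCenter task seen in the log, not a real Nova or oslo_vmware API.

# Sketch of per-image serialization for the datastore image cache: concurrent
# spawns of the same image contend on one lock, only the first download fills
# the cache, and every spawn then copies and extends its own root disk.
from contextlib import contextmanager
import threading

_cache_locks = {}

@contextmanager
def image_cache_lock(datastore, image_id):
    name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    lock = _cache_locks.setdefault(name, threading.Lock())
    with lock:
        yield name

def prepare_root_disk(datastore, image_id, instance_uuid, root_gb,
                      vmdk_exists, fetch_image, copy_virtual_disk, extend_virtual_disk):
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    with image_cache_lock(datastore, image_id):
        if not vmdk_exists(cached):        # SearchDatastore_Task in the log
            fetch_image(image_id, cached)  # only the first spawn downloads the image
    copy_virtual_disk(cached, target)      # CopyVirtualDisk_Task
    extend_virtual_disk(target, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task, size in KB
    return target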
[ 959.263272] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.264132] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad2324d-9e15-4cea-9574-6300202a9506 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326733] env[61974]: DEBUG nova.network.neutron [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Updating instance_info_cache with network_info: [{"id": "23ed5afc-e506-4637-9fdd-6a2630023f66", "address": "fa:16:3e:4a:2d:a2", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ed5afc-e5", "ovs_interfaceid": "23ed5afc-e506-4637-9fdd-6a2630023f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.370484] env[61974]: INFO nova.compute.manager [-] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Took 1.02 seconds to deallocate network for instance. [ 959.421770] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.498112] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525667d3-bf28-ab06-c3f5-7be65d0af4bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009601} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.498985] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1876bd18-21b0-4232-8c4e-d3ce58bbea93 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.503806] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 959.503806] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d0d923-8fac-affe-2b56-5f65e7b10c98" [ 959.503806] env[61974]: _type = "Task" [ 959.503806] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.511041] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d0d923-8fac-affe-2b56-5f65e7b10c98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.532185] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379178, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070568} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.532427] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.533153] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b9a7d5-7df1-41b6-9d79-4397e016d2a0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.554433] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec/a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.554670] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9b3dcbf-444e-4cc2-aa9f-3fe18337ce6b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.572589] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 959.572589] env[61974]: value = "task-1379179" [ 959.572589] env[61974]: _type = "Task" [ 959.572589] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.579730] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.661197] env[61974]: DEBUG nova.compute.manager [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Received event network-vif-plugged-23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.661197] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Acquiring lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.661197] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.661197] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.661197] env[61974]: DEBUG nova.compute.manager [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] No waiting events found dispatching network-vif-plugged-23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 959.661197] env[61974]: WARNING nova.compute.manager [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Received unexpected event network-vif-plugged-23ed5afc-e506-4637-9fdd-6a2630023f66 for instance with vm_state building and task_state spawning. [ 959.661393] env[61974]: DEBUG nova.compute.manager [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Received event network-changed-23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.661538] env[61974]: DEBUG nova.compute.manager [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Refreshing instance network info cache due to event network-changed-23ed5afc-e506-4637-9fdd-6a2630023f66. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 959.661734] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Acquiring lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.693319] env[61974]: DEBUG nova.compute.utils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 959.697057] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 959.697227] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.763324] env[61974]: DEBUG nova.policy [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92306aad15774bb19f9ad1766e4049aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38bb1d7cc5574657a98eaefb81321006', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 959.788356] env[61974]: INFO nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Took 27.21 seconds to build instance. 
[ 959.831186] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.831253] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Instance network_info: |[{"id": "23ed5afc-e506-4637-9fdd-6a2630023f66", "address": "fa:16:3e:4a:2d:a2", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ed5afc-e5", "ovs_interfaceid": "23ed5afc-e506-4637-9fdd-6a2630023f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 959.831776] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Acquired lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.831968] env[61974]: DEBUG nova.network.neutron [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Refreshing network info cache for port 23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.833141] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:2d:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23ed5afc-e506-4637-9fdd-6a2630023f66', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.840577] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Creating folder: Project 
(38bb1d7cc5574657a98eaefb81321006). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.841083] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0edd205e-049d-42a0-9b0a-7cf916095390 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.854803] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Created folder: Project (38bb1d7cc5574657a98eaefb81321006) in parent group-v292912. [ 959.854951] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Creating folder: Instances. Parent ref: group-v292990. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.857405] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29dc30c4-4e61-4c25-add9-87bcc898c3b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.865691] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Created folder: Instances in parent group-v292990. [ 959.866196] env[61974]: DEBUG oslo.service.loopingcall [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.866196] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.866391] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bbaf01e-a39b-4b3f-aa4b-a7361e0f2beb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.885642] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.890901] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.890901] env[61974]: value = "task-1379182" [ 959.890901] env[61974]: _type = "Task" [ 959.890901] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.901506] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379182, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.015402] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d0d923-8fac-affe-2b56-5f65e7b10c98, 'name': SearchDatastore_Task, 'duration_secs': 0.00846} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.015837] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.016225] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] c06a7599-58e8-4796-9e95-d96327f649d0/c06a7599-58e8-4796-9e95-d96327f649d0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.016583] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1629d77d-ef41-4334-8e41-84c370ea0bec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.020549] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9f5a02-256d-437b-aabf-3c320b1f3f31 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.031469] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8755ddd-c877-4378-8301-b035c42125e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.034915] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 960.034915] env[61974]: value = "task-1379183" [ 960.034915] env[61974]: _type = "Task" [ 960.034915] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.069513] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Successfully created port: f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.072743] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00221e63-2b80-46d9-8cd8-71a85df8edcf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.079016] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.090948] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799bc30f-02d8-481d-8076-4b5fb92c3888 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.095082] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.105173] env[61974]: DEBUG nova.compute.provider_tree [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.200936] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 960.294748] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.443s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.403329] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379182, 'name': CreateVM_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.544837] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379183, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.587915] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379179, 'name': ReconfigVM_Task, 'duration_secs': 0.928859} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.588354] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Reconfigured VM instance instance-0000004e to attach disk [datastore1] a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec/a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.588901] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1eee4212-178b-44da-a154-54b71466254b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.595136] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 960.595136] env[61974]: value = "task-1379184" [ 960.595136] env[61974]: _type = "Task" [ 960.595136] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.608511] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379184, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.608511] env[61974]: DEBUG nova.scheduler.client.report [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.636771] env[61974]: DEBUG nova.network.neutron [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Updated VIF entry in instance network info cache for port 23ed5afc-e506-4637-9fdd-6a2630023f66. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.637151] env[61974]: DEBUG nova.network.neutron [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Updating instance_info_cache with network_info: [{"id": "23ed5afc-e506-4637-9fdd-6a2630023f66", "address": "fa:16:3e:4a:2d:a2", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ed5afc-e5", "ovs_interfaceid": "23ed5afc-e506-4637-9fdd-6a2630023f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.902057] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379182, 'name': CreateVM_Task, 'duration_secs': 0.679499} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.902172] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.902872] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.903066] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.903431] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 960.903793] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f296ea5b-23b8-46fb-a9b8-604b8aaf0381 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.908878] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 960.908878] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52964b1c-a424-493b-4292-f34c3d714815" [ 960.908878] env[61974]: _type = "Task" [ 960.908878] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.917763] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52964b1c-a424-493b-4292-f34c3d714815, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.044830] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56929} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.045152] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] c06a7599-58e8-4796-9e95-d96327f649d0/c06a7599-58e8-4796-9e95-d96327f649d0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.045346] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.045612] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62d9b589-c399-4b5c-9011-1dbddc03a222 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.052358] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 961.052358] env[61974]: value = "task-1379185" [ 961.052358] env[61974]: _type = "Task" [ 961.052358] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.060931] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379185, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.108204] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379184, 'name': Rename_Task, 'duration_secs': 0.22915} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.108591] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.108841] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd124c03-1b70-4ec9-a6f2-d62a922c2210 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.112508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.115678] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.669s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.116122] env[61974]: DEBUG nova.objects.instance [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lazy-loading 'resources' on Instance uuid 635f362a-582e-44bc-85d8-8a69943982b0 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.117722] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 961.117722] env[61974]: value = "task-1379186" [ 961.117722] env[61974]: _type = "Task" [ 961.117722] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.127593] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379186, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.138641] env[61974]: INFO nova.scheduler.client.report [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocations for instance f0601d26-4e29-4946-bb52-50e2a2163535 [ 961.139899] env[61974]: DEBUG oslo_concurrency.lockutils [req-0f556caf-ddaf-48db-a6ed-ecab9279be94 req-afdf18aa-c698-431a-98ff-7304d354701b service nova] Releasing lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.214041] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 961.239749] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.240068] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.240244] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.240469] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.240702] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.240879] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 
tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.241154] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.241341] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.241480] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.241614] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.241798] env[61974]: DEBUG nova.virt.hardware [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.242755] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3dd5eb-7ae8-4ba5-ac15-f8d775eba825 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.253433] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc47ba4-8982-4cdc-807f-e087d54e2a3f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.420437] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52964b1c-a424-493b-4292-f34c3d714815, 'name': SearchDatastore_Task, 'duration_secs': 0.009128} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.420890] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.421154] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.421405] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.421596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.421909] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.422137] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c646827-6e7e-46c5-a5a0-4f066bdf3394 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.431401] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.431619] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.432427] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c54137a-6516-4a8d-a425-e5d52607f3c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.437673] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 961.437673] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5289f86b-5b99-97d5-25b3-36d511374aa8" [ 961.437673] env[61974]: _type = "Task" [ 961.437673] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.445701] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5289f86b-5b99-97d5-25b3-36d511374aa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.562853] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081526} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.563154] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.563959] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2125dbcc-e1d0-401b-976a-9f77d8c39fe3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.588563] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] c06a7599-58e8-4796-9e95-d96327f649d0/c06a7599-58e8-4796-9e95-d96327f649d0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.589037] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d21efcd1-965d-41d1-8966-40577a1c99da {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.611076] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 961.611076] env[61974]: value = "task-1379187" [ 961.611076] env[61974]: _type = "Task" [ 961.611076] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.618027] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.626049] env[61974]: DEBUG nova.compute.manager [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Received event network-vif-plugged-f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 961.626298] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] Acquiring lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.626518] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.626693] env[61974]: DEBUG oslo_concurrency.lockutils [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.626876] env[61974]: DEBUG nova.compute.manager [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] No waiting events found dispatching network-vif-plugged-f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 961.627062] env[61974]: WARNING nova.compute.manager [req-6c8de3df-97b3-40ce-abe9-87b2d816ff78 req-d6ce487e-9588-402e-aa4d-46927f3062b8 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Received unexpected event network-vif-plugged-f70a964e-a247-4cd3-a6b1-d308d7d7cb92 for instance with vm_state building and task_state spawning. [ 961.634312] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379186, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.648235] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a1b0648-40ac-4f33-912b-3dcaf9c8e2ef tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "f0601d26-4e29-4946-bb52-50e2a2163535" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.219s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.724763] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Successfully updated port: f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.897249] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0936e40b-3d1a-4fa8-989a-971d84394685 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.905050] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0634b1b-7979-438a-a097-ae8d84d715c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.936505] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203f0916-3a74-4d92-985c-2b4b1a266c74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.949919] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610b9fdd-a08b-4b10-8439-969346fce303 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.953516] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5289f86b-5b99-97d5-25b3-36d511374aa8, 'name': SearchDatastore_Task, 'duration_secs': 0.008979} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.954582] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49bd6903-092a-4b1c-aefb-018347123f7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.964509] env[61974]: DEBUG nova.compute.provider_tree [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.968848] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 961.968848] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52edb295-2838-cd37-9308-5aa270c699b9" [ 961.968848] env[61974]: _type = "Task" [ 961.968848] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.976581] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52edb295-2838-cd37-9308-5aa270c699b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.121181] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379187, 'name': ReconfigVM_Task, 'duration_secs': 0.410857} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.124141] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Reconfigured VM instance instance-0000004f to attach disk [datastore1] c06a7599-58e8-4796-9e95-d96327f649d0/c06a7599-58e8-4796-9e95-d96327f649d0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.124935] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b397101d-6703-410a-b11a-49100f116fba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.132030] env[61974]: DEBUG oslo_vmware.api [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379186, 'name': PowerOnVM_Task, 'duration_secs': 0.520745} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.133217] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.133438] env[61974]: INFO nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Took 9.24 seconds to spawn the instance on the hypervisor. [ 962.133617] env[61974]: DEBUG nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 962.133939] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 962.133939] env[61974]: value = "task-1379188" [ 962.133939] env[61974]: _type = "Task" [ 962.133939] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.134621] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7332a4f-3aa0-44f2-b9b6-e9d768c5dbef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.151078] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379188, 'name': Rename_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.227609] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.228803] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.228803] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.467790] env[61974]: DEBUG nova.scheduler.client.report [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.480956] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52edb295-2838-cd37-9308-5aa270c699b9, 'name': SearchDatastore_Task, 'duration_secs': 0.016593} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.481517] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.481558] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 450956aa-cc55-481c-acf6-287abc8b8efe/450956aa-cc55-481c-acf6-287abc8b8efe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.481790] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e0ed701-2d00-4e1d-89d9-0dfc2be6ef87 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.489521] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 962.489521] env[61974]: value = "task-1379189" [ 962.489521] env[61974]: _type = "Task" [ 962.489521] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.497728] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.647383] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379188, 'name': Rename_Task, 'duration_secs': 0.143249} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.647706] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.647984] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1cad6f6-8b1e-4c1a-b215-059f2cc17c43 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.657500] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 962.657500] env[61974]: value = "task-1379190" [ 962.657500] env[61974]: _type = "Task" [ 962.657500] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.664852] env[61974]: INFO nova.compute.manager [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Took 29.24 seconds to build instance. [ 962.671520] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.767250] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.931536] env[61974]: DEBUG nova.network.neutron [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Updating instance_info_cache with network_info: [{"id": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "address": "fa:16:3e:a4:63:0d", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf70a964e-a2", "ovs_interfaceid": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.976119] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.979050] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.201s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.980697] 
env[61974]: INFO nova.compute.claims [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.999431] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379189, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.000515] env[61974]: INFO nova.scheduler.client.report [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted allocations for instance 635f362a-582e-44bc-85d8-8a69943982b0 [ 963.169899] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e2d7f28-8783-4a83-b6f4-89c8cd6ddf9f tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.292s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.170167] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379190, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.434647] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.435070] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Instance network_info: |[{"id": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "address": "fa:16:3e:a4:63:0d", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf70a964e-a2", "ovs_interfaceid": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 963.435733] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:63:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f70a964e-a247-4cd3-a6b1-d308d7d7cb92', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.443469] env[61974]: DEBUG oslo.service.loopingcall [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.444014] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.444276] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed29b5f4-ac5a-4850-83dc-6893dee85e80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.468120] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.468120] env[61974]: value = "task-1379191" [ 963.468120] env[61974]: _type = "Task" [ 963.468120] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.479032] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379191, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.501827] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379189, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571044} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.502106] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 450956aa-cc55-481c-acf6-287abc8b8efe/450956aa-cc55-481c-acf6-287abc8b8efe.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.502331] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.502609] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23c984e9-8f07-4a0e-a719-eaa1455fa4e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.509718] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8388584f-f622-4142-90af-0b1efe95fd2e tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "635f362a-582e-44bc-85d8-8a69943982b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.758s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.510713] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 963.510713] env[61974]: value = "task-1379192" [ 963.510713] env[61974]: _type = "Task" [ 963.510713] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.520046] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379192, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.652024] env[61974]: DEBUG nova.compute.manager [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Received event network-changed-f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 963.652235] env[61974]: DEBUG nova.compute.manager [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Refreshing instance network info cache due to event network-changed-f70a964e-a247-4cd3-a6b1-d308d7d7cb92. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 963.652496] env[61974]: DEBUG oslo_concurrency.lockutils [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] Acquiring lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.652695] env[61974]: DEBUG oslo_concurrency.lockutils [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] Acquired lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.652900] env[61974]: DEBUG nova.network.neutron [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Refreshing network info cache for port f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.672424] env[61974]: DEBUG oslo_vmware.api [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379190, 'name': PowerOnVM_Task, 'duration_secs': 0.964864} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.672675] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.672870] env[61974]: INFO nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 963.673066] env[61974]: DEBUG nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 963.674039] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8c8035-4d1f-491d-b9a7-4ef809d68e2e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.908770] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.909012] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.909256] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.909455] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.909633] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.912215] env[61974]: INFO nova.compute.manager [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Terminating instance [ 963.913937] env[61974]: DEBUG nova.compute.manager [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 963.914157] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.915230] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a15ae7-17ad-4a71-aec6-63cef8b8668a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.922597] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.922816] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-851db982-b228-4a77-a4c8-c659ffdc5231 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.928877] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 963.928877] env[61974]: value = "task-1379193" [ 963.928877] env[61974]: _type = "Task" [ 963.928877] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.936549] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.977678] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379191, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.980159] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.980387] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.980593] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.980783] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.980958] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.983137] env[61974]: INFO nova.compute.manager [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Terminating instance [ 963.984856] env[61974]: DEBUG nova.compute.manager [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 963.985075] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.985792] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d09b91f-6599-4d4d-9874-a535b8c2c2fc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.995972] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.996621] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c551bb8-cc75-4d89-8f30-357dd2c87f57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.001526] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 964.001526] env[61974]: value = "task-1379194" [ 964.001526] env[61974]: _type = "Task" [ 964.001526] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.009520] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.020618] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379192, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075655} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.020873] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 964.021658] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a24897-5249-421a-8cc8-ae59496d1057 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.047185] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 450956aa-cc55-481c-acf6-287abc8b8efe/450956aa-cc55-481c-acf6-287abc8b8efe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.049943] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef591798-d9a4-4f8e-bdf9-a12f6fa06422 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.070034] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 964.070034] env[61974]: value = "task-1379195" [ 964.070034] env[61974]: _type = "Task" [ 964.070034] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.081920] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379195, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.196375] env[61974]: INFO nova.compute.manager [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Took 27.86 seconds to build instance. 
[ 964.297779] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4456824c-eb55-4842-90e3-78634cb97d83 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.307152] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8996799-c123-4af9-afdf-087cb77ba394 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.344021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b701c5e4-51ee-4206-bfe9-ee0a5c81816c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.349130] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f5c73b-ef1a-46db-ad29-de5c1d3c6dd6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.363286] env[61974]: DEBUG nova.compute.provider_tree [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.439070] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379193, 'name': PowerOffVM_Task, 'duration_secs': 0.192259} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.439376] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.439585] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.439871] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f531e25b-dae4-4394-bfb6-a35c58caf0f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.451933] env[61974]: DEBUG nova.network.neutron [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Updated VIF entry in instance network info cache for port f70a964e-a247-4cd3-a6b1-d308d7d7cb92. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.452299] env[61974]: DEBUG nova.network.neutron [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Updating instance_info_cache with network_info: [{"id": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "address": "fa:16:3e:a4:63:0d", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf70a964e-a2", "ovs_interfaceid": "f70a964e-a247-4cd3-a6b1-d308d7d7cb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.478910] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379191, 'name': CreateVM_Task, 'duration_secs': 0.532995} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.479316] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 964.480148] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.480475] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.480981] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 964.481987] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681517c3-235b-40e5-8fff-078f1ebf3f7a {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.486427] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 964.486427] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52fa227b-bccc-1db6-2d55-5190285b53d9" [ 964.486427] env[61974]: _type = "Task" [ 964.486427] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.494960] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fa227b-bccc-1db6-2d55-5190285b53d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.508108] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.508108] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.510236] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleting the datastore file [datastore1] eb6dfd21-0ba6-455c-b14e-80dacaf6b92c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.511481] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a3004d4-8807-420f-b95c-f5007f3be514 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.513853] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379194, 'name': PowerOffVM_Task, 'duration_secs': 0.241642} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.513853] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.513853] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.514695] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ba498c3-4476-49ea-bebd-0e3fa8fc751d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.517564] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 964.517564] env[61974]: value = "task-1379197" [ 964.517564] env[61974]: _type = "Task" [ 964.517564] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.528134] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.581188] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379195, 'name': ReconfigVM_Task, 'duration_secs': 0.508144} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.581565] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 450956aa-cc55-481c-acf6-287abc8b8efe/450956aa-cc55-481c-acf6-287abc8b8efe.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.582454] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01ce3d89-0825-4b36-8364-36f15d6cd2ab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.590080] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 964.590080] env[61974]: value = "task-1379199" [ 964.590080] env[61974]: _type = "Task" [ 964.590080] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.600570] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379199, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.606160] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.606385] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.606559] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleting the datastore file [datastore1] a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.606830] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b808110-d7bb-472d-bf26-98ccf6313458 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.613210] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for the task: (returnval){ [ 964.613210] env[61974]: value = "task-1379200" [ 964.613210] env[61974]: _type = "Task" [ 964.613210] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.622436] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.699346] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afd29b38-3a8c-4ef0-98da-a43f572b9b15 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.244s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.867059] env[61974]: DEBUG nova.scheduler.client.report [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.955168] env[61974]: DEBUG oslo_concurrency.lockutils [req-9875958b-ec52-4fd6-a94c-a45f082c196a req-5679c956-f295-4628-9f43-07ce15964aac service nova] Releasing lock "refresh_cache-68ad5903-e502-406b-a19e-9e4c28aa5035" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.997456] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fa227b-bccc-1db6-2d55-5190285b53d9, 'name': SearchDatastore_Task, 'duration_secs': 0.011589} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.997745] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.997981] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 964.998247] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.998431] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.998621] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.998875] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-197c83a2-da26-4781-9b5c-d83444d96048 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.011658] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.011833] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.012801] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abd72695-4de6-4766-abc1-b3f4f5273681 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.017472] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 965.017472] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]521af378-3936-6d70-8bf7-061e35d4fdd1" [ 965.017472] env[61974]: _type = "Task" [ 965.017472] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.027370] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "c06a7599-58e8-4796-9e95-d96327f649d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.027609] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.027824] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.028023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.028201] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.029773] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]521af378-3936-6d70-8bf7-061e35d4fdd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.032917] env[61974]: INFO nova.compute.manager [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Terminating instance [ 965.034254] env[61974]: DEBUG oslo_vmware.api [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313545} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.034767] env[61974]: DEBUG nova.compute.manager [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.034961] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.035232] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.035443] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.035640] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.035815] env[61974]: INFO nova.compute.manager [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 965.036055] env[61974]: DEBUG oslo.service.loopingcall [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.036742] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbeae85-79fa-4203-b0e2-3e038b53a494 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.039120] env[61974]: DEBUG nova.compute.manager [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.039222] env[61974]: DEBUG nova.network.neutron [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.044838] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.045082] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40521952-2959-40c7-adcc-86562c893c59 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.052079] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 965.052079] env[61974]: value = "task-1379201" [ 965.052079] env[61974]: _type = "Task" [ 965.052079] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.059597] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379201, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.099344] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379199, 'name': Rename_Task, 'duration_secs': 0.240119} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.099797] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.099885] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c329b6c-0c59-4b9d-8713-faf8f2c7db49 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.105998] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 965.105998] env[61974]: value = "task-1379202" [ 965.105998] env[61974]: _type = "Task" [ 965.105998] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.114005] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.121878] env[61974]: DEBUG oslo_vmware.api [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Task: {'id': task-1379200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290129} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.122156] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.122347] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.122542] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.122725] env[61974]: INFO nova.compute.manager [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 965.122972] env[61974]: DEBUG oslo.service.loopingcall [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.123181] env[61974]: DEBUG nova.compute.manager [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.123278] env[61974]: DEBUG nova.network.neutron [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.371892] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.372147] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 965.375641] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.705s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.375641] env[61974]: DEBUG nova.objects.instance [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 965.414388] env[61974]: DEBUG nova.compute.manager [req-f26078ef-762d-4adb-a4c6-59c780426552 req-d3508a3a-c631-4006-acca-6100c5e6c680 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Received event network-vif-deleted-c1223bcf-9d3f-4e7a-8dee-1bdfb774e108 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.414603] env[61974]: INFO nova.compute.manager [req-f26078ef-762d-4adb-a4c6-59c780426552 req-d3508a3a-c631-4006-acca-6100c5e6c680 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Neutron deleted interface c1223bcf-9d3f-4e7a-8dee-1bdfb774e108; detaching it from the instance and deleting it from the info cache [ 965.415390] env[61974]: DEBUG nova.network.neutron [req-f26078ef-762d-4adb-a4c6-59c780426552 req-d3508a3a-c631-4006-acca-6100c5e6c680 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 965.527568] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]521af378-3936-6d70-8bf7-061e35d4fdd1, 'name': SearchDatastore_Task, 'duration_secs': 0.027576} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.528480] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c9a00d-3021-4fe8-bea5-f6877c82dc14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.534126] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 965.534126] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52907d11-fff5-cf77-965b-bb55a22155aa" [ 965.534126] env[61974]: _type = "Task" [ 965.534126] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.542506] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52907d11-fff5-cf77-965b-bb55a22155aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.561646] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379201, 'name': PowerOffVM_Task, 'duration_secs': 0.239565} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.561924] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.562113] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.562366] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09128ed6-2ab8-4dc5-bbfa-db5acaafec0b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.616060] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379202, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.625014] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.625370] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.625448] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleting the datastore file [datastore1] c06a7599-58e8-4796-9e95-d96327f649d0 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.625726] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f9cd18c-6356-4c9e-8ed6-73953966227a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.632500] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 965.632500] env[61974]: value = "task-1379204" [ 965.632500] env[61974]: _type = "Task" [ 965.632500] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.641135] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.682151] env[61974]: DEBUG nova.compute.manager [req-b4a322e1-330b-40b8-ab22-84661edcaa1b req-80d988b6-88d5-4d9b-be41-5bfa5712c7f1 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Received event network-vif-deleted-8004aa93-735a-4494-97c5-cdc9e33eedb9 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.682381] env[61974]: INFO nova.compute.manager [req-b4a322e1-330b-40b8-ab22-84661edcaa1b req-80d988b6-88d5-4d9b-be41-5bfa5712c7f1 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Neutron deleted interface 8004aa93-735a-4494-97c5-cdc9e33eedb9; detaching it from the instance and deleting it from the info cache [ 965.682562] env[61974]: DEBUG nova.network.neutron [req-b4a322e1-330b-40b8-ab22-84661edcaa1b req-80d988b6-88d5-4d9b-be41-5bfa5712c7f1 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.778896] env[61974]: DEBUG nova.network.neutron [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.869931] env[61974]: DEBUG nova.network.neutron [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.879871] env[61974]: DEBUG nova.compute.utils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 965.883629] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 965.883720] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.918638] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11c68837-8e12-400b-bc51-458db1d89370 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.927583] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06a6d81-1b6f-4f08-9085-fde9cb98855f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.939025] env[61974]: DEBUG nova.policy [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92306aad15774bb19f9ad1766e4049aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38bb1d7cc5574657a98eaefb81321006', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 965.957344] env[61974]: DEBUG nova.compute.manager [req-f26078ef-762d-4adb-a4c6-59c780426552 req-d3508a3a-c631-4006-acca-6100c5e6c680 service nova] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Detach interface failed, port_id=c1223bcf-9d3f-4e7a-8dee-1bdfb774e108, reason: Instance a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 966.043759] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52907d11-fff5-cf77-965b-bb55a22155aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010416} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.044047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.044649] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 68ad5903-e502-406b-a19e-9e4c28aa5035/68ad5903-e502-406b-a19e-9e4c28aa5035.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.044649] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78fd423e-70fe-4478-89bb-f37d50bb488b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.051034] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 966.051034] env[61974]: value = "task-1379205" [ 966.051034] env[61974]: _type = "Task" [ 966.051034] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.060946] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.116140] env[61974]: DEBUG oslo_vmware.api [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379202, 'name': PowerOnVM_Task, 'duration_secs': 0.615922} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.116426] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.116639] env[61974]: INFO nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Took 8.38 seconds to spawn the instance on the hypervisor. 
[ 966.116824] env[61974]: DEBUG nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 966.117625] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158ce6bd-f0b2-4278-930b-a6b5ac1de653 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.140637] env[61974]: DEBUG oslo_vmware.api [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137697} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.140969] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.141279] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.141594] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.141872] env[61974]: INFO nova.compute.manager [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 966.142172] env[61974]: DEBUG oslo.service.loopingcall [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.142387] env[61974]: DEBUG nova.compute.manager [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.142495] env[61974]: DEBUG nova.network.neutron [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.185243] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-533438dc-be6d-4b75-85eb-2439580a740d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.194261] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29171a5-4f26-40c5-b90b-ed849c727fcf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.205459] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Successfully created port: cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.224439] env[61974]: DEBUG nova.compute.manager [req-b4a322e1-330b-40b8-ab22-84661edcaa1b req-80d988b6-88d5-4d9b-be41-5bfa5712c7f1 service nova] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Detach interface failed, port_id=8004aa93-735a-4494-97c5-cdc9e33eedb9, reason: Instance eb6dfd21-0ba6-455c-b14e-80dacaf6b92c could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 966.281897] env[61974]: INFO nova.compute.manager [-] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Took 1.24 seconds to deallocate network for instance. [ 966.372992] env[61974]: INFO nova.compute.manager [-] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Took 1.25 seconds to deallocate network for instance. [ 966.385906] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 966.392192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1aaf3499-7697-4954-89db-af8ee96e0348 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.398412] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.913s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.400664] env[61974]: INFO nova.compute.claims [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.564162] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379205, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.635915] env[61974]: INFO nova.compute.manager [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Took 29.86 seconds to build instance. [ 966.788571] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.880407] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.934755] env[61974]: DEBUG nova.network.neutron [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.063690] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379205, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53651} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.063984] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 68ad5903-e502-406b-a19e-9e4c28aa5035/68ad5903-e502-406b-a19e-9e4c28aa5035.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.064182] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.064432] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12b396ae-5f59-4ceb-a927-ac190787846b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.070408] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 967.070408] env[61974]: value = "task-1379206" [ 967.070408] env[61974]: _type = "Task" [ 967.070408] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.767761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b32068ca-798e-452e-b2e0-3e37cbc556d7 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.332s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.768820] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 967.772976] env[61974]: INFO nova.compute.manager [-] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Took 1.63 seconds to deallocate network for instance. [ 967.774320] env[61974]: DEBUG nova.compute.manager [req-efb19d0d-7789-4a8b-a56e-90472f40773c req-e432aa81-5ffe-4f6a-8f0d-eaeaad232024 service nova] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Received event network-vif-deleted-1b95be55-2118-4629-b9c8-0063c54f9e94 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.779837] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.360848} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.782471] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.783452] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7138e5e1-b3ca-4eb7-b855-aef27ddf4c6b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.809242] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 68ad5903-e502-406b-a19e-9e4c28aa5035/68ad5903-e502-406b-a19e-9e4c28aa5035.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.811563] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c5e2bda-1c71-437b-b72f-f3b58f841eac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.835768] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 967.835768] env[61974]: value = "task-1379207" [ 967.835768] env[61974]: _type = "Task" [ 967.835768] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.838046] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 967.839927] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 967.840452] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 967.840452] env[61974]: DEBUG nova.virt.hardware [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 967.841351] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db01d365-a3e5-4119-81a1-be8a32d353f4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.858835] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd3f45a-f090-4fe2-aecc-b558269f0351 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.863072] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379207, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.986260] env[61974]: DEBUG nova.compute.manager [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Received event network-vif-plugged-cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.986482] env[61974]: DEBUG oslo_concurrency.lockutils [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] Acquiring lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.986907] env[61974]: DEBUG oslo_concurrency.lockutils [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.986907] env[61974]: DEBUG oslo_concurrency.lockutils [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.987165] env[61974]: DEBUG nova.compute.manager [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] No waiting events found dispatching network-vif-plugged-cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 967.987353] env[61974]: WARNING nova.compute.manager [req-973545ec-eb68-41dd-8500-8caf00b01ef9 req-5e287420-55e4-4632-8eed-92a231920ae0 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Received unexpected event network-vif-plugged-cdd55c8c-057f-4dfc-b401-906339145fc3 for instance with vm_state building and task_state spawning. 
[ 968.103629] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd15b4c-a6b2-479c-9c90-25a7e18141bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.111184] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2eeb152-68ba-4c14-9ed1-9ef47bfea64b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.141859] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e32cb4-edca-4e8b-9cf8-f7bd47cf551e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.148941] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bde7798-46aa-4407-a9f4-0233f55978e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.161609] env[61974]: DEBUG nova.compute.provider_tree [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.284411] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.351143] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379207, 'name': ReconfigVM_Task, 'duration_secs': 0.272838} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.351494] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 68ad5903-e502-406b-a19e-9e4c28aa5035/68ad5903-e502-406b-a19e-9e4c28aa5035.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.352182] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a9ebae9-8744-427a-a57b-8f67a3e4a976 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.358679] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 968.358679] env[61974]: value = "task-1379208" [ 968.358679] env[61974]: _type = "Task" [ 968.358679] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.367235] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379208, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.540749] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Successfully updated port: cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.566433] env[61974]: DEBUG nova.compute.manager [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Received event network-changed-cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.566531] env[61974]: DEBUG nova.compute.manager [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Refreshing instance network info cache due to event network-changed-cdd55c8c-057f-4dfc-b401-906339145fc3. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 968.566760] env[61974]: DEBUG oslo_concurrency.lockutils [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] Acquiring lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.566947] env[61974]: DEBUG oslo_concurrency.lockutils [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] Acquired lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.567135] env[61974]: DEBUG nova.network.neutron [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Refreshing network info cache for port cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.664741] env[61974]: DEBUG nova.scheduler.client.report [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.868487] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 
tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379208, 'name': Rename_Task, 'duration_secs': 0.137859} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.868821] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 968.869238] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-502b60fc-3609-4dcb-bb92-5a392be3d85b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.875884] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 968.875884] env[61974]: value = "task-1379209" [ 968.875884] env[61974]: _type = "Task" [ 968.875884] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.884072] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.044310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.099603] env[61974]: DEBUG nova.network.neutron [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.170111] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.776s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.170755] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 969.173330] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.286s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.174714] env[61974]: INFO nova.compute.claims [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.182616] env[61974]: DEBUG nova.network.neutron [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.386305] env[61974]: DEBUG oslo_vmware.api [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379209, 'name': PowerOnVM_Task, 'duration_secs': 0.441564} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.386591] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 969.386820] env[61974]: INFO nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Took 8.17 seconds to spawn the instance on the hypervisor. [ 969.387013] env[61974]: DEBUG nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 969.387757] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49af1751-f79c-4c50-9e9a-b8d878a98b18 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.679413] env[61974]: DEBUG nova.compute.utils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.683013] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 969.683013] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.685176] env[61974]: DEBUG oslo_concurrency.lockutils [req-5fba2ea9-55c2-46bd-9342-e62b4950bb8e req-6368e08c-43af-4cfd-adac-6ea6a1b268e3 service nova] Releasing lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.685317] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.685468] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.742581] env[61974]: DEBUG nova.policy [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6173db476e814cbaa6b3278cfa527bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dae05232e0041dba49b0432d64d82d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 969.905095] env[61974]: INFO nova.compute.manager [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Took 30.80 seconds to build instance. [ 970.037667] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Successfully created port: f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.183963] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 970.203314] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.203763] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.230040] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.379253] env[61974]: DEBUG nova.network.neutron [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Updating instance_info_cache with network_info: [{"id": "cdd55c8c-057f-4dfc-b401-906339145fc3", "address": "fa:16:3e:a6:69:5b", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd55c8c-05", "ovs_interfaceid": "cdd55c8c-057f-4dfc-b401-906339145fc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.407849] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bafb6175-8921-4b70-aede-6d00cc99e0d9 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.583s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.471096] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-54bfa218-ab4d-4e19-af7a-a0ea505709e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.478852] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb757ea-da8b-408d-b5e5-dbc73f431883 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.509214] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0706bbe4-45a8-4260-b828-df2be10561e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.516746] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c9881e-a3e4-47a0-8313-effbb5a6d68c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.530259] env[61974]: DEBUG nova.compute.provider_tree [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.707979] env[61974]: DEBUG nova.compute.utils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.882101] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "refresh_cache-90f8acb1-a0b5-4459-a9d7-c12f652b0b51" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.882462] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance network_info: |[{"id": "cdd55c8c-057f-4dfc-b401-906339145fc3", "address": "fa:16:3e:a6:69:5b", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd55c8c-05", "ovs_interfaceid": "cdd55c8c-057f-4dfc-b401-906339145fc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 970.882911] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:69:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '462a7219-4deb-4225-9cf7-3131ef280363', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdd55c8c-057f-4dfc-b401-906339145fc3', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.890510] env[61974]: DEBUG oslo.service.loopingcall [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.890747] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.891367] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-114666d9-5d87-4aa5-85ef-2f8a26a6da9d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.910605] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.910605] env[61974]: value = "task-1379210" [ 970.910605] env[61974]: _type = "Task" [ 970.910605] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.918279] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379210, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.033186] env[61974]: DEBUG nova.scheduler.client.report [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 971.199108] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 971.211987] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.229605] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.229853] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.230032] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.230227] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.230381] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.230536] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.230742] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.230925] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.231106] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.231271] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.231437] env[61974]: DEBUG nova.virt.hardware [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.232312] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b58dd37-dbae-4181-83b7-40a7c24f8666 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.240380] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038ef385-a62e-4ac5-8f8d-581b606863fb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.423781] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379210, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.539185] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.539919] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 971.542563] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.848s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.542784] env[61974]: DEBUG nova.objects.instance [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'resources' on Instance uuid 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.662025] env[61974]: DEBUG nova.compute.manager [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Received event network-vif-plugged-f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.662252] env[61974]: DEBUG oslo_concurrency.lockutils [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.662471] env[61974]: DEBUG oslo_concurrency.lockutils [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.662713] env[61974]: DEBUG oslo_concurrency.lockutils [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.662847] env[61974]: DEBUG nova.compute.manager [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] No waiting events found dispatching network-vif-plugged-f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.663133] env[61974]: WARNING nova.compute.manager [req-74740ccd-66d7-44f6-9fdc-699b332d1061 req-7b4fdbb0-cdc3-4f24-9c4f-3d788e1e24bc service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Received unexpected event network-vif-plugged-f2f9e10a-4e37-47fa-8040-638e6376acc6 for instance with vm_state building and task_state spawning. 
[ 971.687621] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Successfully updated port: f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.921586] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379210, 'name': CreateVM_Task, 'duration_secs': 0.84873} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.921875] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.922494] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.922678] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.923043] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.923293] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed176ad1-dd91-4165-a2e0-6f2e51e7fe83 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.928251] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 971.928251] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b3dd8e-321c-69d6-38a6-199cececa98a" [ 971.928251] env[61974]: _type = "Task" [ 971.928251] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.936241] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b3dd8e-321c-69d6-38a6-199cececa98a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.045950] env[61974]: DEBUG nova.compute.utils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 972.051510] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 972.051510] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.094837] env[61974]: DEBUG nova.policy [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5f6d80a0784b1f8ff2b2fcfbb44232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40e43abf62a5464091aa725e1cff2b50', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 972.190105] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.190272] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.190424] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.270487] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.270741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 
tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.270972] env[61974]: INFO nova.compute.manager [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Attaching volume 8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2 to /dev/sdb [ 972.305434] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d6765f-bae7-4301-9656-414722801948 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.315013] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515d3307-30d8-42f0-b714-b3132185ab23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.327991] env[61974]: DEBUG nova.virt.block_device [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating existing volume attachment record: 3c945661-07f7-4029-a295-9ea148ee23f6 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 972.335130] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac07621-6084-415a-bd0f-2a29d1c80c19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.342328] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e11abc-8b68-426a-ab70-fa5cb9a6fe16 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.373213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2d6050-2d7d-4dbc-9516-d909ab652927 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.376377] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Successfully created port: 1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 972.383077] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd33c5a8-21ba-41d3-9c4a-df2ba33c5c18 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.396732] env[61974]: DEBUG nova.compute.provider_tree [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.439326] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 
tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b3dd8e-321c-69d6-38a6-199cececa98a, 'name': SearchDatastore_Task, 'duration_secs': 0.010707} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.439596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.439840] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.440146] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.440308] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.440577] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.440749] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b660ebe-f675-4194-a556-660381fc1d8f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.451023] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.451023] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.451023] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e783ce-39c2-4453-8ee1-b27b1818f673 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.456250] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 972.456250] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52e08414-9ced-848f-ddf1-2b3d0dde4d0f" [ 972.456250] env[61974]: _type = "Task" [ 972.456250] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.464075] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e08414-9ced-848f-ddf1-2b3d0dde4d0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.551892] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 972.725286] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 972.874980] env[61974]: DEBUG nova.network.neutron [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance_info_cache with network_info: [{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.899972] env[61974]: DEBUG nova.scheduler.client.report [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.967152] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52e08414-9ced-848f-ddf1-2b3d0dde4d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.019913} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.968039] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9b776e6-d064-40b5-91db-e9c1d0c03590 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.973682] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 972.973682] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529329ad-9ddc-28a1-f3e4-53f7b1ef8aed" [ 972.973682] env[61974]: _type = "Task" [ 972.973682] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.981923] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529329ad-9ddc-28a1-f3e4-53f7b1ef8aed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.377718] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.378184] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Instance network_info: |[{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 973.378678] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:e7:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2f9e10a-4e37-47fa-8040-638e6376acc6', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.386309] env[61974]: DEBUG oslo.service.loopingcall [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.386522] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.386759] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f018c441-54e6-4256-8e32-17062c479aa6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.404862] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.408496] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.862s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.408739] env[61974]: DEBUG nova.objects.instance [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lazy-loading 'resources' on Instance uuid 7b338210-5be8-4838-b815-8f2c6cc19ccd {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.409881] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.409881] env[61974]: value = "task-1379214" [ 973.409881] env[61974]: _type = "Task" [ 973.409881] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.419072] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379214, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.429067] env[61974]: INFO nova.scheduler.client.report [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c [ 973.486583] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529329ad-9ddc-28a1-f3e4-53f7b1ef8aed, 'name': SearchDatastore_Task, 'duration_secs': 0.011505} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.486868] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.487143] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 90f8acb1-a0b5-4459-a9d7-c12f652b0b51/90f8acb1-a0b5-4459-a9d7-c12f652b0b51.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.487421] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18f2d255-a333-4a7a-b2d1-5f9bbdc6e96a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.494539] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 973.494539] env[61974]: value = "task-1379215" [ 973.494539] env[61974]: _type = "Task" [ 973.494539] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.504686] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379215, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.561152] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 973.591068] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 973.591376] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.591611] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 973.591777] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 973.591937] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 973.592121] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 973.592348] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 973.592544] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 973.592739] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Got 1 possible 
topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 973.592924] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 973.593135] env[61974]: DEBUG nova.virt.hardware [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 973.594042] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1d7f3c-1c7d-4365-82c3-fbf88bc52934 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.602883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f10a2c-53e4-4677-9864-67187926d6aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.696764] env[61974]: DEBUG nova.compute.manager [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Received event network-changed-f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.696963] env[61974]: DEBUG nova.compute.manager [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Refreshing instance network info cache due to event network-changed-f2f9e10a-4e37-47fa-8040-638e6376acc6. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 973.697201] env[61974]: DEBUG oslo_concurrency.lockutils [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] Acquiring lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.697466] env[61974]: DEBUG oslo_concurrency.lockutils [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] Acquired lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.697515] env[61974]: DEBUG nova.network.neutron [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Refreshing network info cache for port f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.928932] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379214, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.939657] env[61974]: DEBUG oslo_concurrency.lockutils [None req-92d4d92b-1d62-4b42-8dcf-bd2093a0cdff tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "1aa2a63c-e352-4c9b-9445-9b45bf3ae14c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.810s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.013878] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379215, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.014814] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Successfully updated port: 1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.176178] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2cff21-8372-45c0-a3ab-c79b1b5e4b22 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.183823] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6685a0-33ae-4b98-a37e-47741bfcc22a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.217639] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d84dfa6-7ddc-440b-86ea-9fb556f5d95a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.226166] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40357eb-8c7f-4e64-b2c2-6781a2881c20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.240113] env[61974]: DEBUG nova.compute.provider_tree [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.424936] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379214, 'name': CreateVM_Task, 'duration_secs': 0.55859} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.425233] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.425982] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.426415] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.426764] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.427071] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef0e28da-6593-4d18-b449-26d78a2435b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.434989] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 974.434989] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528f823c-dcec-e525-7b3b-bc55b06e1282" [ 974.434989] env[61974]: _type = "Task" [ 974.434989] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.443107] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528f823c-dcec-e525-7b3b-bc55b06e1282, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.502017] env[61974]: DEBUG nova.network.neutron [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updated VIF entry in instance network info cache for port f2f9e10a-4e37-47fa-8040-638e6376acc6. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.502139] env[61974]: DEBUG nova.network.neutron [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance_info_cache with network_info: [{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.509777] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541991} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.510300] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 90f8acb1-a0b5-4459-a9d7-c12f652b0b51/90f8acb1-a0b5-4459-a9d7-c12f652b0b51.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.510608] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.511348] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21ef4a04-d288-4721-ae7a-16672dbc0e04 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.517405] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 974.517405] env[61974]: value = "task-1379216" [ 974.517405] env[61974]: _type = "Task" [ 974.517405] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.521367] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.521540] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.521702] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.529852] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379216, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.743870] env[61974]: DEBUG nova.scheduler.client.report [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.762977] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.763421] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.945156] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528f823c-dcec-e525-7b3b-bc55b06e1282, 'name': SearchDatastore_Task, 'duration_secs': 0.074796} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.945577] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.945911] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.946541] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.946785] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.946996] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.947282] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27c58981-0467-4a30-b09c-85a44baba63e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.973983] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.974202] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.974958] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aba2347-9c38-43fb-b021-a9789893b548 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.981569] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 974.981569] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a7895f-cb91-6f23-8a8a-2debe98cb727" [ 974.981569] env[61974]: _type = "Task" [ 974.981569] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.988661] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a7895f-cb91-6f23-8a8a-2debe98cb727, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.005525] env[61974]: DEBUG oslo_concurrency.lockutils [req-76d3f2f3-5a3b-4677-afad-994be5b2b93a req-335a41f9-b25b-4b74-95c6-5060cbe7f746 service nova] Releasing lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.027720] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379216, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074617} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.028060] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.029133] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ba8498-76c6-458d-a397-b57c09f68575 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.051084] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 90f8acb1-a0b5-4459-a9d7-c12f652b0b51/90f8acb1-a0b5-4459-a9d7-c12f652b0b51.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.051611] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32f65050-5443-48e7-ba94-55018d69d0fd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.067251] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.075410] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 975.075410] env[61974]: value = "task-1379218" [ 975.075410] env[61974]: _type = "Task" [ 975.075410] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.083462] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379218, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.248899] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.251353] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.578s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.251720] env[61974]: DEBUG nova.objects.instance [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lazy-loading 'resources' on Instance uuid 097ad079-9712-4183-9135-b15ad3a65d6d {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.253466] env[61974]: DEBUG nova.network.neutron [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Updating instance_info_cache with network_info: [{"id": "1dbdb496-b82f-458d-a43d-7575a27ca979", "address": "fa:16:3e:3f:00:b8", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbdb496-b8", "ovs_interfaceid": "1dbdb496-b82f-458d-a43d-7575a27ca979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.267353] env[61974]: DEBUG nova.compute.utils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.269633] env[61974]: INFO nova.scheduler.client.report [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Deleted allocations for instance 7b338210-5be8-4838-b815-8f2c6cc19ccd [ 975.492630] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 
tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a7895f-cb91-6f23-8a8a-2debe98cb727, 'name': SearchDatastore_Task, 'duration_secs': 0.029875} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.493563] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bbb4864-e754-4631-a330-43e82bcc0408 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.498700] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 975.498700] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52ab5f6e-5a68-0ee0-76d6-88202d976b00" [ 975.498700] env[61974]: _type = "Task" [ 975.498700] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.506279] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ab5f6e-5a68-0ee0-76d6-88202d976b00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.585721] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379218, 'name': ReconfigVM_Task, 'duration_secs': 0.479069} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.585963] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 90f8acb1-a0b5-4459-a9d7-c12f652b0b51/90f8acb1-a0b5-4459-a9d7-c12f652b0b51.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.586673] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a8d7177-6c94-420e-b0d3-25513df8251f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.593204] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 975.593204] env[61974]: value = "task-1379219" [ 975.593204] env[61974]: _type = "Task" [ 975.593204] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.601857] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379219, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.668523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.668921] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.758909] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.759258] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Instance network_info: |[{"id": "1dbdb496-b82f-458d-a43d-7575a27ca979", "address": "fa:16:3e:3f:00:b8", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbdb496-b8", "ovs_interfaceid": "1dbdb496-b82f-458d-a43d-7575a27ca979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 975.759953] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:00:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dbdb496-b82f-458d-a43d-7575a27ca979', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
[ 975.768941] env[61974]: DEBUG oslo.service.loopingcall [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.768941] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.769142] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.769618] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8565d945-204c-4cba-9899-ce9b25b09669 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.791576] env[61974]: DEBUG oslo_concurrency.lockutils [None req-490dc04b-23a8-4015-b05c-f87913a0d372 tempest-ServerShowV257Test-337983983 tempest-ServerShowV257Test-337983983-project-member] Lock "7b338210-5be8-4838-b815-8f2c6cc19ccd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.993s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.796070] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.796070] env[61974]: value = "task-1379220" [ 975.796070] env[61974]: _type = "Task" [ 975.796070] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.806819] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379220, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.862630] env[61974]: DEBUG nova.compute.manager [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Received event network-vif-plugged-1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.862969] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Acquiring lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.863268] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.863456] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.863635] env[61974]: DEBUG nova.compute.manager [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] No waiting events found dispatching network-vif-plugged-1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 975.863863] env[61974]: WARNING nova.compute.manager [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Received unexpected event network-vif-plugged-1dbdb496-b82f-458d-a43d-7575a27ca979 for instance with vm_state building and task_state spawning. [ 975.864379] env[61974]: DEBUG nova.compute.manager [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Received event network-changed-1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.864379] env[61974]: DEBUG nova.compute.manager [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Refreshing instance network info cache due to event network-changed-1dbdb496-b82f-458d-a43d-7575a27ca979. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.864495] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Acquiring lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.864768] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Acquired lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.864984] env[61974]: DEBUG nova.network.neutron [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Refreshing network info cache for port 1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.011084] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ab5f6e-5a68-0ee0-76d6-88202d976b00, 'name': SearchDatastore_Task, 'duration_secs': 0.014217} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.011379] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.011643] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.011914] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c688f4b8-9c7e-4f77-b48a-a69e5482dc15 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.020510] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 976.020510] env[61974]: value = "task-1379221" [ 976.020510] env[61974]: _type = "Task" [ 976.020510] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.029317] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.030865] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfff88e7-b617-443f-9ea2-ee39a022f7b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.037933] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3135d44-299a-4171-bfd5-e033349b2a44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.067822] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba28e51-8da4-430e-a204-06bb47e961d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.075303] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8304634-ec39-428d-bba9-1c5d97ef70a8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.091178] env[61974]: DEBUG nova.compute.provider_tree [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.102730] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379219, 'name': Rename_Task, 'duration_secs': 0.160762} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.103022] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.103414] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98bef600-f3f6-439e-8d52-1fb086a3c707 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.110078] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 976.110078] env[61974]: value = "task-1379222" [ 976.110078] env[61974]: _type = "Task" [ 976.110078] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.117448] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.171980] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 976.307187] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379220, 'name': CreateVM_Task, 'duration_secs': 0.466268} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.307379] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 976.308193] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.308380] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.308795] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 976.309114] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89b000fd-8715-4a29-b9e0-2b226cb0f5e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.314390] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 976.314390] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]525bac02-5f98-1795-4abf-6bc89322eb61" [ 976.314390] env[61974]: _type = "Task" [ 976.314390] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.325151] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525bac02-5f98-1795-4abf-6bc89322eb61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.533525] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379221, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.595053] env[61974]: DEBUG nova.scheduler.client.report [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.620588] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379222, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.636609] env[61974]: DEBUG nova.network.neutron [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Updated VIF entry in instance network info cache for port 1dbdb496-b82f-458d-a43d-7575a27ca979. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.637087] env[61974]: DEBUG nova.network.neutron [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Updating instance_info_cache with network_info: [{"id": "1dbdb496-b82f-458d-a43d-7575a27ca979", "address": "fa:16:3e:3f:00:b8", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dbdb496-b8", "ovs_interfaceid": "1dbdb496-b82f-458d-a43d-7575a27ca979", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.695817] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.825227] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]525bac02-5f98-1795-4abf-6bc89322eb61, 'name': SearchDatastore_Task, 'duration_secs': 0.065302} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.825564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.825809] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.826064] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.826226] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.826623] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.826715] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d97c19b6-e3cb-462e-baf1-62b1367409df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.838159] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.838369] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 976.839461] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.839720] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.839948] env[61974]: INFO nova.compute.manager [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Attaching volume 7e510873-51bc-41b6-8678-b1220c4a2013 to /dev/sdb [ 976.841594] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cb345e2-6f8b-403c-932f-5858797881ee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.847373] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 976.847373] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f74d87-a724-5d1c-bbeb-efbb9b53e8d2" [ 976.847373] env[61974]: _type = "Task" [ 976.847373] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.858434] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f74d87-a724-5d1c-bbeb-efbb9b53e8d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.875846] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a1443-199e-4f75-a1ce-52c927f6017c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.879760] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Volume attach. 
Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 976.880169] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292996', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'name': 'volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c1404fd-a954-4849-883b-7898a7e87e2b', 'attached_at': '', 'detached_at': '', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'serial': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 976.881103] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22067196-a326-41e1-bc9b-da036bdbf83c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.900069] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f98b0b6-bc76-48af-92e1-1169697e2fd9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.903051] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54276b4-af1c-4022-9daf-6ac4ad0e2e57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.929378] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2/volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.932993] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e004aeca-8b51-4dc1-b693-843aeb2fbb2c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.945671] env[61974]: DEBUG nova.virt.block_device [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating existing volume attachment record: 6093776a-0877-4ac2-a01f-22ce9809b808 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 976.953504] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 976.953504] env[61974]: value = "task-1379223" [ 976.953504] env[61974]: _type = "Task" [ 976.953504] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.961440] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.031784] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536063} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.032076] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.032317] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.032598] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8606951e-3ea9-4e38-8372-5e82e26b6974 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.038636] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 977.038636] env[61974]: value = "task-1379224" [ 977.038636] env[61974]: _type = "Task" [ 977.038636] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.046888] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379224, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.103170] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.106079] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.887s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.107890] env[61974]: INFO nova.compute.claims [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.120054] env[61974]: DEBUG oslo_vmware.api [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379222, 'name': PowerOnVM_Task, 'duration_secs': 0.645578} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.120054] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.120181] env[61974]: INFO nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Took 9.35 seconds to spawn the instance on the hypervisor. 
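The wait_for_task / _poll_task records above show oslo.vmware repeatedly polling a vCenter task (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task) until it reports "completed successfully". Purely as a caller-side illustration of that pattern, and not oslo.vmware's actual implementation, the loop amounts to something like the sketch below; the get_task_info callable, poll interval, timeout, and state names are assumptions introduced for the example.

import logging
import time

LOG = logging.getLogger(__name__)


def wait_for_task_sketch(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds or fails.

    get_task_info is assumed to return an object with .name, .state
    ('queued', 'running', 'success' or 'error'), .progress and .error.
    This mirrors the "progress is N%" / "completed successfully"
    records in the log, but is only an illustrative sketch.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            LOG.debug("Task %s completed successfully.", info.name)
            return info
        if info.state == 'error':
            raise RuntimeError("Task %s failed: %s" % (info.name, info.error))
        # Matches the periodic "progress is N%" records emitted while waiting.
        LOG.debug("Task %s progress is %s%%.", info.name, info.progress or 0)
        time.sleep(poll_interval)
    raise TimeoutError("Task did not complete within %ss" % timeout)

In the log itself this waiting is done by oslo_vmware/api.py (wait_for_task at line 397 and _poll_task at lines 434/444), with each task identified by a value such as task-1379222 or a session-scoped id.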
[ 977.120281] env[61974]: DEBUG nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.121131] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a70591-eae7-49eb-95be-710f1da1c830 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.126889] env[61974]: INFO nova.scheduler.client.report [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Deleted allocations for instance 097ad079-9712-4183-9135-b15ad3a65d6d [ 977.139393] env[61974]: DEBUG oslo_concurrency.lockutils [req-6570517c-36f2-4014-a181-ad3f553f327e req-f556680a-1886-4fcf-96f8-4aec785f04f6 service nova] Releasing lock "refresh_cache-5780d1d6-cd40-4b97-8a68-072c090540af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.363041] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f74d87-a724-5d1c-bbeb-efbb9b53e8d2, 'name': SearchDatastore_Task, 'duration_secs': 0.035737} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.363617] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23579b58-e513-4cc7-a14b-c0f789b4f06f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.369038] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 977.369038] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52fd046e-b608-4043-de5f-113bccf5bfd0" [ 977.369038] env[61974]: _type = "Task" [ 977.369038] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.377213] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fd046e-b608-4043-de5f-113bccf5bfd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.467913] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379223, 'name': ReconfigVM_Task, 'duration_secs': 0.358338} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.468332] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfigured VM instance instance-00000043 to attach disk [datastore2] volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2/volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.475583] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-392edd06-23dd-4441-a7b1-0c565e5ba507 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.497058] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 977.497058] env[61974]: value = "task-1379228" [ 977.497058] env[61974]: _type = "Task" [ 977.497058] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.507637] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379228, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.548544] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14415} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.548967] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.550260] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545a196b-0d32-475a-aad0-7f5b79bf06d8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.572077] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.572373] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b18de66-04df-4358-bd5d-4c7b4116dbde {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.591950] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 977.591950] env[61974]: value = "task-1379229" [ 977.591950] env[61974]: _type = "Task" [ 977.591950] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.600527] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.644175] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f26e4ccb-95a4-4283-8f45-c1175975f712 tempest-ServersTestMultiNic-506975209 tempest-ServersTestMultiNic-506975209-project-member] Lock "097ad079-9712-4183-9135-b15ad3a65d6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.327s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.646336] env[61974]: INFO nova.compute.manager [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Took 31.89 seconds to build instance. [ 977.878850] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52fd046e-b608-4043-de5f-113bccf5bfd0, 'name': SearchDatastore_Task, 'duration_secs': 0.009483} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.879157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.879456] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 5780d1d6-cd40-4b97-8a68-072c090540af/5780d1d6-cd40-4b97-8a68-072c090540af.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.879725] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a985548-734d-41c2-8f3f-ef07e9348a54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.885294] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 977.885294] env[61974]: value = "task-1379230" [ 977.885294] env[61974]: _type = "Task" [ 977.885294] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.893445] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.006929] env[61974]: DEBUG oslo_vmware.api [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379228, 'name': ReconfigVM_Task, 'duration_secs': 0.189012} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.007267] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292996', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'name': 'volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c1404fd-a954-4849-883b-7898a7e87e2b', 'attached_at': '', 'detached_at': '', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'serial': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 978.101272] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379229, 'name': ReconfigVM_Task, 'duration_secs': 0.282273} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.101637] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfigured VM instance instance-00000053 to attach disk [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.102407] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4604c20-45a5-4aaa-ae19-b7474d710918 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.109271] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 978.109271] env[61974]: value = "task-1379231" [ 978.109271] env[61974]: _type = "Task" [ 978.109271] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.117636] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379231, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.150646] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b58851f1-b074-450a-a362-f6fb5e99e664 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.908s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.372907] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c16da0-acef-403c-ba03-3afa933d5952 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.381720] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f891bcb-d9f4-4fd8-9084-8c6ff2d76ef7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.418998] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e936cb6-374e-470f-9549-90e135862f22 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.425113] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379230, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.430435] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1129840-33ad-44fb-8ad2-ee59a38842d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.444712] env[61974]: DEBUG nova.compute.provider_tree [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.620581] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379231, 'name': Rename_Task, 'duration_secs': 0.314606} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.620941] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.621247] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b63b3a8a-3f1c-448c-b5d6-efeeb6257368 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.628503] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 978.628503] env[61974]: value = "task-1379232" [ 978.628503] env[61974]: _type = "Task" [ 978.628503] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.638885] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.896425] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851401} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.896740] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 5780d1d6-cd40-4b97-8a68-072c090540af/5780d1d6-cd40-4b97-8a68-072c090540af.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 978.896971] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 978.897250] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ccc9346-a096-4814-b1cc-17e969af5d3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.904622] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 978.904622] env[61974]: value = "task-1379233" [ 978.904622] env[61974]: _type = "Task" [ 978.904622] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.912561] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379233, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.947806] env[61974]: DEBUG nova.scheduler.client.report [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.059517] env[61974]: DEBUG nova.objects.instance [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid 1c1404fd-a954-4849-883b-7898a7e87e2b {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.142210] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379232, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.415399] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379233, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063561} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.415522] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.416375] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35683ff7-a188-4696-9f91-9a006d0f45c0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.446054] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 5780d1d6-cd40-4b97-8a68-072c090540af/5780d1d6-cd40-4b97-8a68-072c090540af.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.446570] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b5219c-b052-4b8f-ba2b-18d7264708f4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.467662] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.468254] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 979.475022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.893s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.475022] env[61974]: DEBUG nova.objects.instance [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 979.483621] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 979.483621] env[61974]: value = "task-1379235" [ 979.483621] env[61974]: _type = "Task" [ 979.483621] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.496364] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379235, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.564831] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b53e8a35-e7b0-4999-8c78-bd723a0d76b1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.643481] env[61974]: DEBUG oslo_vmware.api [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379232, 'name': PowerOnVM_Task, 'duration_secs': 0.912612} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.643765] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.644050] env[61974]: INFO nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Took 8.44 seconds to spawn the instance on the hypervisor. [ 979.644283] env[61974]: DEBUG nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 979.645169] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8885b163-575e-43eb-90de-175f3210c46f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.980818] env[61974]: DEBUG nova.compute.utils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.982345] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 979.982519] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.995018] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.070379] env[61974]: DEBUG nova.policy [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44db76dad27e40cdb4507bfe842db572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb6e7e7e52fc4aacaf5054732cd7d2df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 980.170879] env[61974]: INFO nova.compute.manager [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Took 31.71 seconds to build instance. [ 980.485648] env[61974]: DEBUG oslo_concurrency.lockutils [None req-75dde7b0-a1ae-42fc-82df-6a243d5e45cb tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.485648] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 24.708s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.488178] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 980.512439] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379235, 'name': ReconfigVM_Task, 'duration_secs': 0.73126} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.512439] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 5780d1d6-cd40-4b97-8a68-072c090540af/5780d1d6-cd40-4b97-8a68-072c090540af.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.513756] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ea70606-9d14-4d83-96c5-02f87d89ca26 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.525027] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 980.525027] env[61974]: value = "task-1379236" [ 980.525027] env[61974]: _type = "Task" [ 980.525027] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.538290] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379236, 'name': Rename_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.676344] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a88df78b-22f6-4982-ad62-0579a894f8e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.709s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.714127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "450956aa-cc55-481c-acf6-287abc8b8efe" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.714512] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.716317] env[61974]: DEBUG nova.compute.manager [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 980.717287] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-32f99021-6111-42d7-b429-6ec946440718 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.725257] env[61974]: DEBUG nova.compute.manager [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 980.725888] env[61974]: DEBUG nova.objects.instance [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'flavor' on Instance uuid 450956aa-cc55-481c-acf6-287abc8b8efe {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.853666] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Successfully created port: e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.036173] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379236, 'name': Rename_Task, 'duration_secs': 0.234037} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.036516] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.036785] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e63eff4e-3f0c-45dd-884a-0f56ef43d8a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.043230] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 981.043230] env[61974]: value = "task-1379237" [ 981.043230] env[61974]: _type = "Task" [ 981.043230] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.053106] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379237, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.231390] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.231712] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-865b62a9-f7f1-44c2-8308-864651622664 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.240095] env[61974]: DEBUG oslo_vmware.api [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 981.240095] env[61974]: value = "task-1379238" [ 981.240095] env[61974]: _type = "Task" [ 981.240095] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.249797] env[61974]: DEBUG oslo_vmware.api [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379238, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.499220] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Volume attach. Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 981.499220] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293000', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'name': 'volume-7e510873-51bc-41b6-8678-b1220c4a2013', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1fa5433-8f26-48db-a19d-d1e11245fb44', 'attached_at': '', 'detached_at': '', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'serial': '7e510873-51bc-41b6-8678-b1220c4a2013'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 981.500459] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd003ac-c2f2-4d01-bae3-a8494b4143f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.519278] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 981.526377] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9dfb14-8910-4b34-bc83-041c4c578956 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.555274] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-7e510873-51bc-41b6-8678-b1220c4a2013/volume-7e510873-51bc-41b6-8678-b1220c4a2013.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.558481] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1c1404fd-a954-4849-883b-7898a7e87e2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.558839] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b1fa5433-8f26-48db-a19d-d1e11245fb44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.559094] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 0ce75511-290c-4fea-9657-dfdd8d9efc4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.559344] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 45fda940-b7f0-410c-b31a-b5cd365c28fe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 981.559585] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.559862] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.559862] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance ceb0dd02-6441-4923-99f6-73f8eab86fe5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.559862] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance eb6dfd21-0ba6-455c-b14e-80dacaf6b92c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 981.560157] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 981.560431] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance c06a7599-58e8-4796-9e95-d96327f649d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 981.560628] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 450956aa-cc55-481c-acf6-287abc8b8efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.560807] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 68ad5903-e502-406b-a19e-9e4c28aa5035 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.561045] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 90f8acb1-a0b5-4459-a9d7-c12f652b0b51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.561252] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance abe0168a-e838-468a-a223-7c2a64497c0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.561408] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 5780d1d6-cd40-4b97-8a68-072c090540af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.561532] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e6feee04-8aae-4151-8187-3ef4885bcf73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 981.568743] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd55a27b-6efd-41e7-ad65-41e9ed399e94 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.592376] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379237, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.595566] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 981.595805] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.595967] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 981.596172] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.596328] 
env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 981.596479] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 981.596716] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 981.596927] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 981.597115] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 981.597345] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 981.597575] env[61974]: DEBUG nova.virt.hardware [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 981.597936] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 981.597936] env[61974]: value = "task-1379239" [ 981.597936] env[61974]: _type = "Task" [ 981.597936] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.598955] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd200258-7886-432e-9773-8b4badf6b7f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.617903] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d38f203-369a-4904-8169-cd3270c7b218 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.623658] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.751829] env[61974]: DEBUG oslo_vmware.api [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379238, 'name': PowerOffVM_Task, 'duration_secs': 0.218757} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.752161] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.752392] env[61974]: DEBUG nova.compute.manager [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.753313] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb81e1b-45c1-43a3-93ef-7f032f6c82e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.966036] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.966329] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.060399] env[61974]: DEBUG oslo_vmware.api [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379237, 
'name': PowerOnVM_Task, 'duration_secs': 0.836939} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.061111] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.061395] env[61974]: INFO nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Took 8.50 seconds to spawn the instance on the hypervisor. [ 982.061622] env[61974]: DEBUG nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 982.062409] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec2a551-705a-48cd-948d-39a43ed0304a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.084590] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 9c26e20b-dfc4-432c-a851-499dbea18f01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 982.121557] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.272913] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31666c5a-1d31-4b4c-b096-d7cbfc11257e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.468750] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 982.584580] env[61974]: INFO nova.compute.manager [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Took 33.73 seconds to build instance. 
[ 982.589089] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance cc048c22-81e0-40fb-9a06-9b84a54e4891 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 982.620288] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379239, 'name': ReconfigVM_Task, 'duration_secs': 0.654056} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.620524] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-7e510873-51bc-41b6-8678-b1220c4a2013/volume-7e510873-51bc-41b6-8678-b1220c4a2013.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.625756] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83fdaa17-7355-44b3-bc0e-d64c63b1c6ce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.641532] env[61974]: DEBUG nova.compute.manager [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Received event network-vif-plugged-e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 982.641921] env[61974]: DEBUG oslo_concurrency.lockutils [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] Acquiring lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.642190] env[61974]: DEBUG oslo_concurrency.lockutils [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.642380] env[61974]: DEBUG oslo_concurrency.lockutils [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.642649] env[61974]: DEBUG nova.compute.manager [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] No waiting events found dispatching 
network-vif-plugged-e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 982.642729] env[61974]: WARNING nova.compute.manager [req-cbc20a34-1749-4ee0-ae05-265635b2b4ad req-cd292357-59db-421c-a058-f897a78e742a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Received unexpected event network-vif-plugged-e51d39a0-9311-4ac6-95ed-ddf57e132aa6 for instance with vm_state building and task_state spawning. [ 982.644974] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 982.644974] env[61974]: value = "task-1379240" [ 982.644974] env[61974]: _type = "Task" [ 982.644974] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.657058] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379240, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.809701] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Successfully updated port: e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.004631] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.087237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ce0ef263-19e8-4375-8b40-dc2e6f40fe43 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.658s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.092083] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1a04b388-8739-4b46-a8e1-cd79835bcf48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 983.092817] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 983.094218] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 983.158710] env[61974]: DEBUG oslo_vmware.api [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379240, 'name': ReconfigVM_Task, 'duration_secs': 0.27524} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.159194] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293000', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'name': 'volume-7e510873-51bc-41b6-8678-b1220c4a2013', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1fa5433-8f26-48db-a19d-d1e11245fb44', 'attached_at': '', 'detached_at': '', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'serial': '7e510873-51bc-41b6-8678-b1220c4a2013'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 983.315896] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.316222] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.316222] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.404747] env[61974]: DEBUG nova.objects.instance [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'flavor' on Instance uuid 450956aa-cc55-481c-acf6-287abc8b8efe {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.428147] env[61974]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee7a12e-d41b-41a3-a092-4ca29ba60b11 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.437574] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ab2b30-397f-40c4-b656-c1c2c564e622 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.480805] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cdda70-dce9-4a65-bc6a-74c31d8207b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.491137] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993436f-a096-401c-9755-bf40619c87aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.508150] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.808720] env[61974]: DEBUG nova.compute.manager [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Stashing vm_state: active {{(pid=61974) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 983.870132] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.908971] env[61974]: DEBUG oslo_concurrency.lockutils [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.909155] env[61974]: DEBUG oslo_concurrency.lockutils [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquired lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.909219] env[61974]: DEBUG nova.network.neutron [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 983.913885] env[61974]: DEBUG nova.objects.instance [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'info_cache' on Instance uuid 450956aa-cc55-481c-acf6-287abc8b8efe {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.016377] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.146781] env[61974]: DEBUG nova.network.neutron [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Updating instance_info_cache with network_info: [{"id": "e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "address": "fa:16:3e:85:16:5c", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape51d39a0-93", "ovs_interfaceid": 
"e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.274266] env[61974]: DEBUG nova.objects.instance [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.335106] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.420809] env[61974]: DEBUG nova.objects.base [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Object Instance<450956aa-cc55-481c-acf6-287abc8b8efe> lazy-loaded attributes: flavor,info_cache {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 984.522608] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 984.523696] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.038s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.525127] env[61974]: DEBUG nova.compute.manager [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 984.525127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.103s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.528058] env[61974]: INFO nova.compute.claims [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.533223] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6a1a04-6d4a-4c73-aa1b-faec34f86ae2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.652837] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 
tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.653593] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Instance network_info: |[{"id": "e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "address": "fa:16:3e:85:16:5c", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape51d39a0-93", "ovs_interfaceid": "e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 984.653748] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:16:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ccbc7a-cf8d-4ea2-8411-291a1e27df7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e51d39a0-9311-4ac6-95ed-ddf57e132aa6', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.661558] env[61974]: DEBUG oslo.service.loopingcall [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.661881] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.662164] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-deab9a18-b517-4102-95d3-d63bcdb2dc45 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.686182] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.686182] env[61974]: value = "task-1379241" [ 984.686182] env[61974]: _type = "Task" [ 984.686182] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.698468] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379241, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.700958] env[61974]: DEBUG nova.compute.manager [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Received event network-changed-e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.701266] env[61974]: DEBUG nova.compute.manager [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Refreshing instance network info cache due to event network-changed-e51d39a0-9311-4ac6-95ed-ddf57e132aa6. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 984.701436] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] Acquiring lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.701622] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] Acquired lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.701751] env[61974]: DEBUG nova.network.neutron [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Refreshing network info cache for port e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 984.780690] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d95523f-586f-4465-b089-d0017d7f4467 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.941s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.048294] env[61974]: INFO nova.compute.manager [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] instance snapshotting [ 985.051669] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22adf8ac-a0e1-4e05-ba65-a1a8d85bde0c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.078834] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718eafda-b5f3-4c13-94dc-15b00e19356b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.200673] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379241, 'name': CreateVM_Task, 'duration_secs': 0.414593} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.201667] env[61974]: DEBUG nova.network.neutron [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Updating instance_info_cache with network_info: [{"id": "23ed5afc-e506-4637-9fdd-6a2630023f66", "address": "fa:16:3e:4a:2d:a2", "network": {"id": "8888bbd4-6828-4d33-8135-899b225e8a70", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-605557774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38bb1d7cc5574657a98eaefb81321006", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "462a7219-4deb-4225-9cf7-3131ef280363", "external-id": "nsx-vlan-transportzone-468", "segmentation_id": 468, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ed5afc-e5", "ovs_interfaceid": "23ed5afc-e506-4637-9fdd-6a2630023f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.202852] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 985.203823] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.203823] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.204067] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.204549] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27118feb-e2c5-4e12-8222-1889c22822b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.212449] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 
tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 985.212449] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a99284-e72d-d5ef-5586-5bd62d6eb61c" [ 985.212449] env[61974]: _type = "Task" [ 985.212449] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.227610] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a99284-e72d-d5ef-5586-5bd62d6eb61c, 'name': SearchDatastore_Task, 'duration_secs': 0.011936} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.227936] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.228203] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.229347] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.229347] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.229347] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.229347] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5475d8c-29df-4c84-b365-690dd4f7800e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.239412] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.239647] env[61974]: 
DEBUG nova.virt.vmwareapi.vmops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.240778] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f7eff80-5e01-4179-80e4-7d62c2de2cec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.248677] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 985.248677] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5255ebc8-a9ef-6369-313b-a5cb0622c666" [ 985.248677] env[61974]: _type = "Task" [ 985.248677] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.258342] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5255ebc8-a9ef-6369-313b-a5cb0622c666, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.477905] env[61974]: DEBUG oslo_concurrency.lockutils [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.478422] env[61974]: DEBUG oslo_concurrency.lockutils [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.478422] env[61974]: DEBUG nova.compute.manager [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 985.479679] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfd2329-ec99-4c35-86e9-20cb25a97b5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.483087] env[61974]: DEBUG nova.network.neutron [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Updated VIF entry in instance network info cache for port e51d39a0-9311-4ac6-95ed-ddf57e132aa6. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 985.484513] env[61974]: DEBUG nova.network.neutron [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Updating instance_info_cache with network_info: [{"id": "e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "address": "fa:16:3e:85:16:5c", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape51d39a0-93", "ovs_interfaceid": "e51d39a0-9311-4ac6-95ed-ddf57e132aa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.490015] env[61974]: DEBUG nova.compute.manager [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 985.490812] env[61974]: DEBUG nova.objects.instance [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.595669] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Creating Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 985.596026] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-99eba89c-dd80-415c-bd9f-c698bfa8694b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.604958] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.605565] env[61974]: DEBUG 
oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.612971] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 985.612971] env[61974]: value = "task-1379242" [ 985.612971] env[61974]: _type = "Task" [ 985.612971] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.626963] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379242, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.707059] env[61974]: DEBUG oslo_concurrency.lockutils [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Releasing lock "refresh_cache-450956aa-cc55-481c-acf6-287abc8b8efe" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.763589] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5255ebc8-a9ef-6369-313b-a5cb0622c666, 'name': SearchDatastore_Task, 'duration_secs': 0.011416} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.767192] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96cc8bfa-73c4-48b3-ab4b-900c311a4a40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.776154] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 985.776154] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b2d61a-cb7f-f44b-54fb-dbb085f77853" [ 985.776154] env[61974]: _type = "Task" [ 985.776154] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.789293] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b2d61a-cb7f-f44b-54fb-dbb085f77853, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.844924] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9008193c-6e08-4ffb-8563-4fd234f429c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.854023] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc547d7-6c41-4a5d-9091-7505b8d707a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.886717] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3fc3b5-58f1-4d6d-8b8e-a2abd37cfb4b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.895303] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39a068f-dc5c-4bdf-a211-c07bb97c5a47 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.910615] env[61974]: DEBUG nova.compute.provider_tree [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.988053] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd2b3c1d-1f62-43c1-8400-8f8fda007887 req-fecab487-9bb9-4b1a-9e5a-4abed76c928b service nova] Releasing lock "refresh_cache-e6feee04-8aae-4151-8187-3ef4885bcf73" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.996040] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.996324] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b13b181e-f38c-4791-8673-5ae606800dc1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.004894] env[61974]: DEBUG oslo_vmware.api [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 986.004894] env[61974]: value = "task-1379243" [ 986.004894] env[61974]: _type = "Task" [ 986.004894] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.014203] env[61974]: DEBUG oslo_vmware.api [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379243, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.108054] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.127743] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379242, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.212558] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.212558] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c53f21c8-8b33-4749-b3d6-eaef93eb2def {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.222062] env[61974]: DEBUG oslo_vmware.api [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 986.222062] env[61974]: value = "task-1379244" [ 986.222062] env[61974]: _type = "Task" [ 986.222062] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.231508] env[61974]: DEBUG oslo_vmware.api [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.289247] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b2d61a-cb7f-f44b-54fb-dbb085f77853, 'name': SearchDatastore_Task, 'duration_secs': 0.013354} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.289769] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.290254] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] e6feee04-8aae-4151-8187-3ef4885bcf73/e6feee04-8aae-4151-8187-3ef4885bcf73.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.290732] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd1c71e8-17a4-42d2-abad-33a885880402 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.301314] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 986.301314] env[61974]: value = "task-1379245" [ 986.301314] env[61974]: _type = "Task" [ 986.301314] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.313570] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.416028] env[61974]: DEBUG nova.scheduler.client.report [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.517570] env[61974]: DEBUG oslo_vmware.api [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379243, 'name': PowerOffVM_Task, 'duration_secs': 0.315037} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.517840] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.518109] env[61974]: DEBUG nova.compute.manager [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 986.519066] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd92aea6-04b5-4b63-be73-5839ff49a3b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.631820] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379242, 'name': CreateSnapshot_Task, 'duration_secs': 0.961393} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.632112] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Created Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 986.633282] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c6cdbd-8b95-42e0-b4e6-d52b4142f536 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.637034] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.733218] env[61974]: DEBUG oslo_vmware.api [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379244, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.817397] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379245, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.920234] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.921031] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 986.923921] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.038s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.924179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.926625] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.138s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.926867] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.929286] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.049s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.929614] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.931771] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.648s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.931982] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.934361] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.239s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.935988] env[61974]: INFO nova.compute.claims [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 986.966066] env[61974]: INFO nova.scheduler.client.report [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted allocations for instance a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec [ 986.968108] env[61974]: INFO nova.scheduler.client.report [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleted allocations for instance 45fda940-b7f0-410c-b31a-b5cd365c28fe [ 986.981139] env[61974]: INFO nova.scheduler.client.report [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance c06a7599-58e8-4796-9e95-d96327f649d0 [ 986.986194] env[61974]: INFO nova.scheduler.client.report [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Deleted allocations for instance eb6dfd21-0ba6-455c-b14e-80dacaf6b92c [ 987.033097] env[61974]: DEBUG oslo_concurrency.lockutils [None req-94f19af7-4a3d-4bb7-9d80-5a7688b6c76c tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.158647] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Creating linked-clone VM from snapshot {{(pid=61974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 987.158984] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with 
opID=oslo.vmware-1cf2aeba-6c22-4467-8001-4522ca1ad30d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.170054] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 987.170054] env[61974]: value = "task-1379246" [ 987.170054] env[61974]: _type = "Task" [ 987.170054] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.178966] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379246, 'name': CloneVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.232997] env[61974]: DEBUG oslo_vmware.api [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379244, 'name': PowerOnVM_Task, 'duration_secs': 0.642667} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.233301] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.233577] env[61974]: DEBUG nova.compute.manager [None req-776c56e2-3000-46df-a608-52049fd1a381 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 987.234275] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf84a2b-c162-43aa-9a36-8fbc6f8253bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.315747] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718349} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.316138] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] e6feee04-8aae-4151-8187-3ef4885bcf73/e6feee04-8aae-4151-8187-3ef4885bcf73.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 987.316382] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.316664] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dff2d619-0019-487a-818e-bb77bd6adb8b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.324769] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 987.324769] env[61974]: value = "task-1379247" [ 987.324769] env[61974]: _type = "Task" [ 987.324769] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.333221] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.441227] env[61974]: DEBUG nova.compute.utils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.444405] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.444734] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.479876] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f15411f0-fc31-4241-b378-881de072a3dc tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.499s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.481249] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1f9f4970-b3b2-41f5-a50e-7e179525649a tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "45fda940-b7f0-410c-b31a-b5cd365c28fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.352s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.492374] env[61974]: DEBUG oslo_concurrency.lockutils [None req-968cf873-dce6-495a-a000-9bde6be4c6c6 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "c06a7599-58e8-4796-9e95-d96327f649d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.464s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.496627] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a41d0d35-e2d9-4855-8630-860ba9a8c252 tempest-MultipleCreateTestJSON-1433517849 tempest-MultipleCreateTestJSON-1433517849-project-member] Lock "eb6dfd21-0ba6-455c-b14e-80dacaf6b92c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.587s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.505260] env[61974]: DEBUG nova.policy [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 987.682453] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379246, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.841941] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184481} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.843169] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.844834] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf36ad43-986f-4736-b880-35b00c08d0c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.877215] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] e6feee04-8aae-4151-8187-3ef4885bcf73/e6feee04-8aae-4151-8187-3ef4885bcf73.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.878252] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29da901a-cdef-41ed-a5f3-9c296501c162 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.901825] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 987.901825] env[61974]: value = "task-1379248" [ 987.901825] env[61974]: _type = "Task" [ 987.901825] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.916458] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379248, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.948127] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 987.959298] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Successfully created port: a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.969713] env[61974]: DEBUG nova.objects.instance [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.184880] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379246, 'name': CloneVM_Task} progress is 94%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.234884] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57609449-a346-46cb-bfb4-38428d5855e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.244417] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad28c714-18a7-441f-8b2a-32e25998e4e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.277870] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae212af-c2fc-40cd-9f3e-4089fe054e20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.287883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b56efac-0690-42b2-a097-bd3554cc859b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.304575] env[61974]: DEBUG nova.compute.provider_tree [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.320988] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.321292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.321508] env[61974]: 
DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.321710] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.321891] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.324076] env[61974]: INFO nova.compute.manager [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Terminating instance [ 988.326320] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "refresh_cache-0ce75511-290c-4fea-9657-dfdd8d9efc4b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.326781] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquired lock "refresh_cache-0ce75511-290c-4fea-9657-dfdd8d9efc4b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.327134] env[61974]: DEBUG nova.network.neutron [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.414592] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379248, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.478942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.478942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.478942] env[61974]: DEBUG nova.network.neutron [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.478942] env[61974]: DEBUG nova.objects.instance [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'info_cache' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.688674] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379246, 'name': CloneVM_Task} progress is 94%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.807745] env[61974]: DEBUG nova.scheduler.client.report [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 988.847583] env[61974]: DEBUG nova.network.neutron [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.913789] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379248, 'name': ReconfigVM_Task, 'duration_secs': 0.847179} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.914149] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Reconfigured VM instance instance-00000055 to attach disk [datastore1] e6feee04-8aae-4151-8187-3ef4885bcf73/e6feee04-8aae-4151-8187-3ef4885bcf73.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.914924] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bce14577-2f22-4fa6-9f04-9b2532588913 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.932518] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 988.932518] env[61974]: value = "task-1379249" [ 988.932518] env[61974]: _type = "Task" [ 988.932518] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.940727] env[61974]: DEBUG nova.network.neutron [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.945786] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379249, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.970206] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 988.983987] env[61974]: DEBUG nova.objects.base [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 989.001650] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.001896] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.002078] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.002273] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.002426] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.002740] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.002985] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.003188] env[61974]: DEBUG 
nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.003435] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.003513] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.003692] env[61974]: DEBUG nova.virt.hardware [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.004900] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780ee973-315c-4c80-b0d5-210fb2c9bd7f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.015684] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d986e6d9-de79-4ac6-af7b-4274dccb2d66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.085109] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "603bcf2a-fc99-4ba4-b757-c37d93554870" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.085371] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.182893] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379246, 'name': CloneVM_Task, 'duration_secs': 1.864311} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.183238] env[61974]: INFO nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Created linked-clone VM from snapshot [ 989.184019] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ec0e0f-f49a-410a-b2a1-c17914a937d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.193362] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Uploading image 7303d3bd-2aee-4964-855b-6068bc1100ed {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 989.216173] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 989.216173] env[61974]: value = "vm-293003" [ 989.216173] env[61974]: _type = "VirtualMachine" [ 989.216173] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 989.216858] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-36ca138b-8b9f-47a2-8ae7-7003edda892e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.224819] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease: (returnval){ [ 989.224819] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c65ff1-568e-6098-031c-c74ce2e62bc0" [ 989.224819] env[61974]: _type = "HttpNfcLease" [ 989.224819] env[61974]: } obtained for exporting VM: (result){ [ 989.224819] env[61974]: value = "vm-293003" [ 989.224819] env[61974]: _type = "VirtualMachine" [ 989.224819] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 989.225337] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the lease: (returnval){ [ 989.225337] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c65ff1-568e-6098-031c-c74ce2e62bc0" [ 989.225337] env[61974]: _type = "HttpNfcLease" [ 989.225337] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 989.232459] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 989.232459] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c65ff1-568e-6098-031c-c74ce2e62bc0" [ 989.232459] env[61974]: _type = "HttpNfcLease" [ 989.232459] env[61974]: } is initializing. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 989.313997] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.314547] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 989.317264] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.313s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.320694] env[61974]: INFO nova.compute.claims [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.443337] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379249, 'name': Rename_Task, 'duration_secs': 0.264355} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.443529] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.443753] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-673afafc-2cb3-4c6f-9a03-bb498bfee5a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.447088] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Releasing lock "refresh_cache-0ce75511-290c-4fea-9657-dfdd8d9efc4b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.447518] env[61974]: DEBUG nova.compute.manager [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 989.447721] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.448552] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed85e8d-93f7-4896-a2c9-9f86daabf2bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.452270] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 989.452270] env[61974]: value = "task-1379251" [ 989.452270] env[61974]: _type = "Task" [ 989.452270] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.457775] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.458480] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c56bd9c3-ea31-430c-b9c4-b08fdaf3fccc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.463706] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.465251] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 989.465251] env[61974]: value = "task-1379252" [ 989.465251] env[61974]: _type = "Task" [ 989.465251] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.475907] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379252, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.587790] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 989.734711] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 989.734711] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c65ff1-568e-6098-031c-c74ce2e62bc0" [ 989.734711] env[61974]: _type = "HttpNfcLease" [ 989.734711] env[61974]: } is ready. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 989.735247] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 989.735247] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c65ff1-568e-6098-031c-c74ce2e62bc0" [ 989.735247] env[61974]: _type = "HttpNfcLease" [ 989.735247] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 989.735883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83836e50-732e-4c3d-bcb5-422f430777f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.745072] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk from lease info. {{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 989.746028] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk for reading. 
{{(pid=61974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 989.809256] env[61974]: DEBUG nova.network.neutron [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.815314] env[61974]: DEBUG nova.compute.manager [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-vif-plugged-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.815633] env[61974]: DEBUG oslo_concurrency.lockutils [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.815904] env[61974]: DEBUG oslo_concurrency.lockutils [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.816089] env[61974]: DEBUG oslo_concurrency.lockutils [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.816319] env[61974]: DEBUG nova.compute.manager [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] No waiting events 
found dispatching network-vif-plugged-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 989.816615] env[61974]: WARNING nova.compute.manager [req-0e232c01-3109-43d0-85b2-08a6689eb2a0 req-6a7e0581-eebc-4e69-9489-6eea6be6335e service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received unexpected event network-vif-plugged-a2b829bf-e2cb-41c7-a840-499beb350683 for instance with vm_state building and task_state spawning. [ 989.826015] env[61974]: DEBUG nova.compute.utils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.828138] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 989.828233] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 989.860459] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-733c67f1-d73d-4999-a6fb-7be2e9a9133a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.911496] env[61974]: DEBUG nova.policy [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 989.964560] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379251, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.975971] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379252, 'name': PowerOffVM_Task, 'duration_secs': 0.268993} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.976680] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.977513] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.977513] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3b9619a-48d9-4a9c-8f78-c8b830d2676a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.009783] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.010176] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.010444] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleting the datastore file [datastore2] 0ce75511-290c-4fea-9657-dfdd8d9efc4b {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.011580] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e522e12-35c5-40a7-b0fd-d709e6e6f520 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.018888] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for the task: (returnval){ [ 990.018888] env[61974]: value = "task-1379254" [ 990.018888] env[61974]: _type = "Task" [ 990.018888] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.027729] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379254, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.039450] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Successfully updated port: a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.119981] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.321209] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.328780] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 990.337554] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Successfully created port: 8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.465670] env[61974]: DEBUG oslo_vmware.api [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379251, 'name': PowerOnVM_Task, 'duration_secs': 0.94691} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.468971] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.469256] env[61974]: INFO nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Took 8.95 seconds to spawn the instance on the hypervisor. 
[ 990.469989] env[61974]: DEBUG nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.471346] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bbef0f-16e3-47da-b790-1a01082dd0b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.537746] env[61974]: DEBUG oslo_vmware.api [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Task: {'id': task-1379254, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200028} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.538794] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.540577] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.541811] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.542133] env[61974]: INFO nova.compute.manager [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Took 1.09 seconds to destroy the instance on the hypervisor. [ 990.542542] env[61974]: DEBUG oslo.service.loopingcall [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.543346] env[61974]: DEBUG nova.compute.manager [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 990.543439] env[61974]: DEBUG nova.network.neutron [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.548202] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.548348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.549390] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.572649] env[61974]: DEBUG nova.network.neutron [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 990.666904] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adb52b1-3955-4ebb-8c63-6e40d1193127 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.676026] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ddb188-2ab4-4c5a-91e0-15c16d297b4f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.721820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c34bcb-74e7-46e4-a9bb-b328f83ce0e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.734454] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5488fbeb-00ee-4030-ab26-d798214d051a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.752679] env[61974]: DEBUG nova.compute.provider_tree [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.826591] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.826591] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e56b78d-d0bc-4a25-91ab-eaf466e8d872 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.839474] env[61974]: DEBUG oslo_vmware.api [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 990.839474] env[61974]: value = "task-1379255" [ 990.839474] env[61974]: _type = "Task" [ 990.839474] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.850178] env[61974]: DEBUG oslo_vmware.api [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.999990] env[61974]: INFO nova.compute.manager [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Took 35.80 seconds to build instance. 
[ 991.076447] env[61974]: DEBUG nova.network.neutron [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.088457] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.230496] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.230784] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.231027] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.231396] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.234472] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.235877] env[61974]: INFO nova.compute.manager [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Terminating instance [ 991.238130] env[61974]: DEBUG nova.compute.manager [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.238335] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.239932] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439e8753-5ff1-4a63-9a96-810094cf72cb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.250479] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.250779] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f61fda59-dcb1-46db-8d97-1e3bc53c5033 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.256489] env[61974]: DEBUG nova.scheduler.client.report [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.261354] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 991.261354] env[61974]: value = "task-1379256" [ 991.261354] env[61974]: _type = "Task" [ 991.261354] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.275125] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.329999] env[61974]: DEBUG nova.network.neutron [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.342180] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 991.356693] env[61974]: DEBUG oslo_vmware.api [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379255, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.366566] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.366885] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.367079] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.367332] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.367524] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.367690] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.367965] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.368114] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.368307] env[61974]: DEBUG nova.virt.hardware [None 
req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.368514] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.368723] env[61974]: DEBUG nova.virt.hardware [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.369974] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8c45d8-3df0-4348-a871-eaf05f7e55aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.379843] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3118718-c882-4cd2-8702-97e958274901 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.503925] env[61974]: DEBUG oslo_concurrency.lockutils [None req-454ff8b4-5ca2-48ee-baba-51f415a88f81 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.718s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.579297] env[61974]: INFO nova.compute.manager [-] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Took 1.04 seconds to deallocate network for instance. [ 991.763260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.763924] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 991.767626] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.432s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.780116] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379256, 'name': PowerOffVM_Task, 'duration_secs': 0.307943} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.780479] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.780675] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.780979] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff6a8695-2d8d-4936-8652-bb03b60759aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.786880] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "e6feee04-8aae-4151-8187-3ef4885bcf73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.786880] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.790126] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.790126] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.790126] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.792248] env[61974]: INFO nova.compute.manager [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Terminating instance [ 991.797990] env[61974]: DEBUG nova.compute.manager [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.798521] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.799544] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21371500-efb8-475b-b78f-49e62b81d222 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.816070] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.816920] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c70ab9d5-dccb-4420-8576-26d3d21c99ee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.826045] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 991.826045] env[61974]: value = "task-1379258" [ 991.826045] env[61974]: _type = "Task" [ 991.826045] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.832251] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.832575] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Instance network_info: |[{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 991.833017] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:5e:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2b829bf-e2cb-41c7-a840-499beb350683', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.840751] env[61974]: DEBUG oslo.service.loopingcall [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.849354] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.850264] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.851479] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b920251-9ba8-4935-9cbc-3b04c1504943 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.872298] env[61974]: DEBUG nova.compute.manager [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.872577] env[61974]: DEBUG nova.compute.manager [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing instance network info cache due to event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.872671] env[61974]: DEBUG oslo_concurrency.lockutils [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.872817] env[61974]: DEBUG oslo_concurrency.lockutils [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.872986] env[61974]: DEBUG nova.network.neutron [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.876479] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.876817] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.876955] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleting the datastore file [datastore2] 90f8acb1-a0b5-4459-a9d7-c12f652b0b51 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.878239] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-450f7e11-ca4b-4974-b9c3-c12d75f36463 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.884243] env[61974]: DEBUG oslo_vmware.api [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379255, 'name': PowerOnVM_Task, 'duration_secs': 0.656585} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.886522] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.886741] env[61974]: DEBUG nova.compute.manager [None req-e8245646-582d-4ee8-ba6d-cab55ab5776a tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.886968] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.886968] env[61974]: value = "task-1379259" [ 991.886968] env[61974]: _type = "Task" [ 991.886968] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.887827] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771518cf-0b01-4c49-b71c-51eaa063ff54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.897393] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 991.897393] env[61974]: value = "task-1379260" [ 991.897393] env[61974]: _type = "Task" [ 991.897393] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.910339] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379259, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.916988] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379260, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.040483] env[61974]: DEBUG nova.compute.manager [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received event network-vif-plugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 992.040821] env[61974]: DEBUG oslo_concurrency.lockutils [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.041048] env[61974]: DEBUG oslo_concurrency.lockutils [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.041282] env[61974]: DEBUG oslo_concurrency.lockutils [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.041545] env[61974]: DEBUG nova.compute.manager [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] No waiting events found dispatching network-vif-plugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 992.042194] env[61974]: WARNING nova.compute.manager [req-836bd0b2-db3e-49ca-a16b-47dd064886e4 req-04336715-a6a7-4630-bd80-ff7be378050f service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received unexpected event network-vif-plugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df for instance with vm_state building and task_state spawning. 
[ 992.086749] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.208142] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Successfully updated port: 8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.276055] env[61974]: DEBUG nova.compute.utils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 992.280141] env[61974]: INFO nova.compute.claims [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.285921] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 992.286176] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.289737] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 992.338333] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379258, 'name': PowerOffVM_Task, 'duration_secs': 0.263538} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.339879] env[61974]: DEBUG nova.policy [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '453e22de6c0f478d93d6269ea122d660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61c671d85b64b28872586c2816b83f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 992.341815] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.342021] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.342327] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61f18790-0b54-4c24-bd0b-87309226c853 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.406271] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379259, 'name': CreateVM_Task, 'duration_secs': 0.492815} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.407015] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.407926] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.408242] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.408785] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.409155] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16fb51bc-0ea3-488b-bbea-6695ecf21054 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.417561] env[61974]: DEBUG oslo_vmware.api [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240488} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.422167] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.422167] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.422167] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.422167] env[61974]: INFO nova.compute.manager [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Took 1.18 seconds to destroy the instance on the hypervisor. [ 992.422167] env[61974]: DEBUG oslo.service.loopingcall [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.422547] env[61974]: DEBUG nova.compute.manager [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.422547] env[61974]: DEBUG nova.network.neutron [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.424149] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.424350] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.424551] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleting the datastore file [datastore1] e6feee04-8aae-4151-8187-3ef4885bcf73 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.426168] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-449ac100-cd52-44e6-8850-b34b5afe547d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.428707] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 992.428707] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52634c4a-900c-0ac5-6e8f-7b587ae67999" [ 992.428707] env[61974]: _type = "Task" [ 992.428707] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.438946] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 992.438946] env[61974]: value = "task-1379262" [ 992.438946] env[61974]: _type = "Task" [ 992.438946] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.448086] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52634c4a-900c-0ac5-6e8f-7b587ae67999, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.455971] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.710684] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.710845] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.711032] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.761519] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Successfully created port: f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 992.794928] env[61974]: INFO nova.compute.resource_tracker [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating resource usage from migration 0d48f5de-f68d-4bff-830f-7e38a2227b8a [ 992.862538] env[61974]: DEBUG nova.network.neutron [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updated VIF entry in instance network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.862910] env[61974]: DEBUG nova.network.neutron [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.943599] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52634c4a-900c-0ac5-6e8f-7b587ae67999, 'name': SearchDatastore_Task, 'duration_secs': 0.025312} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.946846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.948021] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.948021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.948021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.948021] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.948391] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bed26db2-ca90-48c6-b61f-d27468cae6ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.958893] env[61974]: DEBUG oslo_vmware.api [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.37444} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.959979] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.960201] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.960387] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.960572] env[61974]: INFO nova.compute.manager [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Took 1.16 seconds to destroy the instance on the hypervisor. [ 992.960882] env[61974]: DEBUG oslo.service.loopingcall [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.961054] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.961218] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.962486] env[61974]: DEBUG nova.compute.manager [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 992.962486] env[61974]: DEBUG nova.network.neutron [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.963623] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe801d8d-0003-4314-ac5c-c597d3c9209e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.971071] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 992.971071] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52847ead-0832-ec65-23de-68187efd5dff" [ 992.971071] env[61974]: _type = "Task" [ 992.971071] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.983364] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52847ead-0832-ec65-23de-68187efd5dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.089668] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c586d1-1aed-4884-b469-e1a6681f98ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.099631] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3177e5-0de9-40e9-89c2-7566a90357cd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.131399] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e58964-b4b4-4fcd-872c-bac34820d8e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.140269] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e760c0-3245-41a9-83d0-8fa8ac56bb0c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.158152] env[61974]: DEBUG nova.compute.provider_tree [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.265492] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.307077] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 993.333978] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 993.334272] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.334437] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 993.334625] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.334780] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 993.334933] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 993.335588] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 993.335588] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 993.335588] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 993.335760] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 993.335811] env[61974]: DEBUG nova.virt.hardware [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 993.336718] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498d4f74-9e37-4ff9-a104-50edee833849 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.353443] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4d5c38-6a76-4b64-8df6-d296238dde2d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.376034] env[61974]: DEBUG oslo_concurrency.lockutils [req-13767134-d089-4f24-96db-86f090e3580d req-62b6e0e7-bac9-4bac-a58d-1feb048d032f service nova] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.435580] env[61974]: DEBUG nova.network.neutron [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.467068] env[61974]: DEBUG nova.network.neutron [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updating instance_info_cache with network_info: [{"id": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "address": "fa:16:3e:d7:6a:cc", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7e5444-15", "ovs_interfaceid": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.481210] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52847ead-0832-ec65-23de-68187efd5dff, 'name': SearchDatastore_Task, 'duration_secs': 0.012415} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.482600] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af3995d7-6682-442e-b965-f1ad68ec878b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.489214] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 993.489214] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528f8f0c-9d48-1439-26e3-be08039eec1e" [ 993.489214] env[61974]: _type = "Task" [ 993.489214] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.497661] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528f8f0c-9d48-1439-26e3-be08039eec1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.661021] env[61974]: DEBUG nova.scheduler.client.report [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.776911] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.777303] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.881390] env[61974]: DEBUG nova.compute.manager [req-26b89a6a-d093-4575-a14a-54cd8f6d1af8 req-ba4abbbb-3752-4346-ba4b-0859737a066a service nova] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Received event network-vif-deleted-cdd55c8c-057f-4dfc-b401-906339145fc3 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.881685] env[61974]: DEBUG nova.compute.manager [req-26b89a6a-d093-4575-a14a-54cd8f6d1af8 req-ba4abbbb-3752-4346-ba4b-0859737a066a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Received event network-vif-deleted-e51d39a0-9311-4ac6-95ed-ddf57e132aa6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.881920] env[61974]: INFO nova.compute.manager [req-26b89a6a-d093-4575-a14a-54cd8f6d1af8 req-ba4abbbb-3752-4346-ba4b-0859737a066a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Neutron deleted interface e51d39a0-9311-4ac6-95ed-ddf57e132aa6; detaching it from the instance and deleting it from the info cache [ 993.882153] env[61974]: DEBUG nova.network.neutron [req-26b89a6a-d093-4575-a14a-54cd8f6d1af8 req-ba4abbbb-3752-4346-ba4b-0859737a066a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.883642] env[61974]: DEBUG nova.network.neutron [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.938378] env[61974]: INFO nova.compute.manager [-] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] 
Took 1.52 seconds to deallocate network for instance. [ 993.970167] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.970661] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance network_info: |[{"id": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "address": "fa:16:3e:d7:6a:cc", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7e5444-15", "ovs_interfaceid": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 993.971255] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:6a:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f7e5444-15c1-48c6-8635-b93eb2ee90df', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.981848] env[61974]: DEBUG oslo.service.loopingcall [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.983077] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 993.983448] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da5dc1fb-a52a-434f-8e9a-4cd2e392ec07 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.016197] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528f8f0c-9d48-1439-26e3-be08039eec1e, 'name': SearchDatastore_Task, 'duration_secs': 0.015227} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.017813] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.018148] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 9c26e20b-dfc4-432c-a851-499dbea18f01/9c26e20b-dfc4-432c-a851-499dbea18f01.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.018415] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.018415] env[61974]: value = "task-1379263" [ 994.018415] env[61974]: _type = "Task" [ 994.018415] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.019085] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91c0d2c0-c7b3-43db-b8e4-248e81969adc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.031578] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379263, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.032574] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 994.032574] env[61974]: value = "task-1379264" [ 994.032574] env[61974]: _type = "Task" [ 994.032574] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.042671] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.075617] env[61974]: DEBUG nova.compute.manager [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received event network-changed-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.075743] env[61974]: DEBUG nova.compute.manager [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Refreshing instance network info cache due to event network-changed-8f7e5444-15c1-48c6-8635-b93eb2ee90df. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.075924] env[61974]: DEBUG oslo_concurrency.lockutils [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] Acquiring lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.076090] env[61974]: DEBUG oslo_concurrency.lockutils [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] Acquired lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.076261] env[61974]: DEBUG nova.network.neutron [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Refreshing network info cache for port 8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.165963] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.398s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.166174] env[61974]: INFO nova.compute.manager [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Migrating [ 994.166471] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.166647] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.168165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.531s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.169878] env[61974]: INFO nova.compute.claims [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.280395] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 994.385151] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbeb7f3b-fbdf-461d-b34a-dcb093ee1018 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.387348] env[61974]: INFO nova.compute.manager [-] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Took 1.43 seconds to deallocate network for instance. [ 994.399686] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41230c30-78d4-4c62-909e-c3f41195137e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.437311] env[61974]: DEBUG nova.compute.manager [req-26b89a6a-d093-4575-a14a-54cd8f6d1af8 req-ba4abbbb-3752-4346-ba4b-0859737a066a service nova] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Detach interface failed, port_id=e51d39a0-9311-4ac6-95ed-ddf57e132aa6, reason: Instance e6feee04-8aae-4151-8187-3ef4885bcf73 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 994.445351] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.486128] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Successfully updated port: f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.532150] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379263, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.543388] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379264, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.674056] env[61974]: INFO nova.compute.rpcapi [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 994.674871] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.808664] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.869961] env[61974]: DEBUG nova.network.neutron [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updated VIF entry in instance network info cache for port 8f7e5444-15c1-48c6-8635-b93eb2ee90df. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.870266] env[61974]: DEBUG nova.network.neutron [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updating instance_info_cache with network_info: [{"id": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "address": "fa:16:3e:d7:6a:cc", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7e5444-15", "ovs_interfaceid": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.895286] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.989199] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.989353] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.990022] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.997584] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6d9ee-6dcb-4867-8799-38935a368fe0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.006663] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4fd8b21f-176b-4ab0-85b0-91ec28f279c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.046095] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68befde0-8301-4a87-895f-aba122649494 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.060323] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379263, 'name': CreateVM_Task, 'duration_secs': 0.557719} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.060617] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.954407} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.060860] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.061154] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 9c26e20b-dfc4-432c-a851-499dbea18f01/9c26e20b-dfc4-432c-a851-499dbea18f01.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.061381] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.062618] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aebca66-4dc2-465f-88f5-5d491fe79c82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.067010] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.067195] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.067801] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.068011] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae737f01-371b-48b0-a70b-55ffe6be44a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.069839] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f7f803d-d7b2-46bb-bc27-f8df17b4131b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.087877] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 995.087877] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5298703f-aff8-9896-0d87-1e4cd96903c3" [ 995.087877] env[61974]: _type = "Task" [ 995.087877] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.088393] env[61974]: DEBUG nova.compute.provider_tree [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.092222] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 995.092222] env[61974]: value = "task-1379265" [ 995.092222] env[61974]: _type = "Task" [ 995.092222] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.107315] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5298703f-aff8-9896-0d87-1e4cd96903c3, 'name': SearchDatastore_Task, 'duration_secs': 0.013877} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.110794] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.111105] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.111388] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.111547] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.111755] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 995.112239] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.112805] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d287b59-2eb8-488e-b7cb-c8c5058dd2ce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.123504] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 995.123609] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 995.124410] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0faf4ff5-745a-4fd7-94b6-b87caab27fa9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.131136] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 995.131136] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d7658a-9d82-3337-94fe-aa48283cf49c" [ 995.131136] env[61974]: _type = "Task" [ 995.131136] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.141246] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d7658a-9d82-3337-94fe-aa48283cf49c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.202192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.202484] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.202646] env[61974]: DEBUG nova.network.neutron [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.373785] env[61974]: DEBUG oslo_concurrency.lockutils [req-55bff9cd-42dc-4361-84b7-899383a35aa2 req-fa2ebdc9-1b70-474f-8c3e-ab57d01b01be service nova] Releasing lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.521201] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 995.592295] env[61974]: DEBUG nova.scheduler.client.report [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.606447] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074593} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.606447] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.607450] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c32c01e-6150-4f73-9cc4-50cfe1808f7b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.632786] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 9c26e20b-dfc4-432c-a851-499dbea18f01/9c26e20b-dfc4-432c-a851-499dbea18f01.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.635949] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c5ed9c0-5fb1-444f-9433-160bb7ab29a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.663920] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d7658a-9d82-3337-94fe-aa48283cf49c, 'name': SearchDatastore_Task, 'duration_secs': 0.011722} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.665952] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 995.665952] env[61974]: value = "task-1379266" [ 995.665952] env[61974]: _type = "Task" [ 995.665952] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.666229] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7074a4e-0827-486c-b5ab-1422b497e3ee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.675663] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 995.675663] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5208b40c-dcf5-4fad-3686-5016a2db6be8" [ 995.675663] env[61974]: _type = "Task" [ 995.675663] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.679231] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379266, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.689209] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5208b40c-dcf5-4fad-3686-5016a2db6be8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.691822] env[61974]: DEBUG nova.network.neutron [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updating instance_info_cache with network_info: [{"id": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "address": "fa:16:3e:91:1f:af", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf32df777-3b", "ovs_interfaceid": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.914734] env[61974]: DEBUG nova.network.neutron [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating 
instance_info_cache with network_info: [{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.101542] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.933s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.102112] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 996.105497] env[61974]: DEBUG nova.compute.manager [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Received event network-vif-plugged-f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.105758] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Acquiring lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.106018] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.106242] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.106457] env[61974]: DEBUG nova.compute.manager [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] No waiting events found dispatching network-vif-plugged-f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 996.106673] env[61974]: WARNING nova.compute.manager [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Received unexpected event network-vif-plugged-f32df777-3ba7-47f1-9845-8327f4f53fe8 for instance with vm_state building and task_state spawning. [ 996.106881] env[61974]: DEBUG nova.compute.manager [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Received event network-changed-f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.107092] env[61974]: DEBUG nova.compute.manager [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Refreshing instance network info cache due to event network-changed-f32df777-3ba7-47f1-9845-8327f4f53fe8. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 996.107362] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Acquiring lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.107851] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.988s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.109395] env[61974]: INFO nova.compute.claims [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.179420] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379266, 'name': ReconfigVM_Task, 'duration_secs': 0.394089} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.179798] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 9c26e20b-dfc4-432c-a851-499dbea18f01/9c26e20b-dfc4-432c-a851-499dbea18f01.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.183903] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69e172df-2ffd-4140-9ed6-3e5c1ddcaf66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.193758] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.194101] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Instance network_info: |[{"id": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "address": "fa:16:3e:91:1f:af", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf32df777-3b", "ovs_interfaceid": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 996.194438] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5208b40c-dcf5-4fad-3686-5016a2db6be8, 'name': SearchDatastore_Task, 'duration_secs': 0.015883} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.195881] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Acquired lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.196104] env[61974]: DEBUG nova.network.neutron [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Refreshing network info cache for port f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.197419] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:1f:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f32df777-3ba7-47f1-9845-8327f4f53fe8', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.204901] env[61974]: DEBUG oslo.service.loopingcall [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.205175] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.205430] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] cc048c22-81e0-40fb-9a06-9b84a54e4891/cc048c22-81e0-40fb-9a06-9b84a54e4891.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.205793] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 996.205793] env[61974]: value = "task-1379267" [ 996.205793] env[61974]: _type = "Task" [ 996.205793] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.208747] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.208889] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5edeed90-19f4-44ba-ba84-524b705daf6c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.211771] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db128f3c-945c-47ac-8887-9b0be33ac2d2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.236885] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379267, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.239313] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 996.239313] env[61974]: value = "task-1379268" [ 996.239313] env[61974]: _type = "Task" [ 996.239313] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.239542] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.239542] env[61974]: value = "task-1379269" [ 996.239542] env[61974]: _type = "Task" [ 996.239542] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.252488] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.255674] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379269, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.417713] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.509468] env[61974]: DEBUG nova.network.neutron [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updated VIF entry in instance network info cache for port f32df777-3ba7-47f1-9845-8327f4f53fe8. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.509932] env[61974]: DEBUG nova.network.neutron [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updating instance_info_cache with network_info: [{"id": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "address": "fa:16:3e:91:1f:af", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf32df777-3b", "ovs_interfaceid": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.610019] env[61974]: DEBUG nova.compute.utils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 996.612438] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: 
ef17d87d-31ae-4d08-afba-157521e7d1e3] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 996.612698] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.656875] env[61974]: DEBUG nova.policy [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f596350bbdfe4dbfb1be74e9525aefa7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09232b7456d94a96acefecf6098bd274', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 996.723722] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379267, 'name': Rename_Task, 'duration_secs': 0.255554} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.724114] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.724401] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91899a4e-1574-4732-8127-001cc031c441 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.732151] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 996.732151] env[61974]: value = "task-1379270" [ 996.732151] env[61974]: _type = "Task" [ 996.732151] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.741635] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379270, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.756309] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379268, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.760602] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379269, 'name': CreateVM_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.962642] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Successfully created port: 8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 997.012946] env[61974]: DEBUG oslo_concurrency.lockutils [req-30a62d31-94b3-427e-b117-33440445e0e3 req-453f03f6-cd19-40b9-b4f3-1b4f474db6e0 service nova] Releasing lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.117340] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 997.246120] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379270, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.261915] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689637} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.268797] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] cc048c22-81e0-40fb-9a06-9b84a54e4891/cc048c22-81e0-40fb-9a06-9b84a54e4891.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 997.269082] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 997.269345] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379269, 'name': CreateVM_Task, 'duration_secs': 0.681904} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.269786] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fcd0586-deaf-444c-bf60-9e3538d6c8b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.271846] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.272563] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.272733] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.273073] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 997.273733] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc9a0b5-a5e5-46b1-8bb0-a2ffbfc8a140 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.279264] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 997.279264] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d198d1-7181-7192-2c65-cf3320f147e7" [ 997.279264] env[61974]: _type = "Task" [ 997.279264] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.289454] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 997.289454] env[61974]: value = "task-1379271" [ 997.289454] env[61974]: _type = "Task" [ 997.289454] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.299602] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d198d1-7181-7192-2c65-cf3320f147e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.307475] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379271, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.461602] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9e16d3-0b3b-48ec-a1ed-8cd83add6ba3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.469820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3523274f-e3a6-41c9-9ded-12f623789308 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.502300] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503cdd8d-742c-4275-8b57-ba1e2c36cb92 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.511160] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41151671-d30d-430b-a52b-5c4bf2f9d5f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.525770] env[61974]: DEBUG nova.compute.provider_tree [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.744214] env[61974]: DEBUG oslo_vmware.api [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379270, 'name': PowerOnVM_Task, 'duration_secs': 0.666722} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.744447] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.744657] env[61974]: INFO nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 997.744915] env[61974]: DEBUG nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.745689] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69af0d77-16e3-4f49-a9b0-5d37b365a2f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.791463] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d198d1-7181-7192-2c65-cf3320f147e7, 'name': SearchDatastore_Task, 'duration_secs': 0.017005} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.794748] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.795026] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.795273] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.795444] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.795644] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.796224] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21de7d67-904f-4925-a68d-4e15dc11455e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.804058] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134903} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.804354] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 997.805180] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbfe082-4ea9-49ad-bfc8-b1d5f9ed8161 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.808723] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.808921] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.809990] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af44f35-a116-44b6-80b5-007730fa6b1a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.829752] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] cc048c22-81e0-40fb-9a06-9b84a54e4891/cc048c22-81e0-40fb-9a06-9b84a54e4891.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 997.830416] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09acc84f-649f-4645-9616-c6333171ba4e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.845664] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 997.845664] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f17700-b95a-6729-45dc-29c4417ed358" [ 997.845664] env[61974]: _type = "Task" [ 997.845664] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.851651] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 997.851651] env[61974]: value = "task-1379272" [ 997.851651] env[61974]: _type = "Task" [ 997.851651] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.854596] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f17700-b95a-6729-45dc-29c4417ed358, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.862619] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379272, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.939733] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c418902-9cf4-4a88-b399-36567ac3bfb9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.959303] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 0 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 998.029034] env[61974]: DEBUG nova.scheduler.client.report [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.130568] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 998.151887] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.152228] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.152420] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.152615] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.152769] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.152921] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.153156] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 998.153325] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 998.153501] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.153685] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.153896] env[61974]: DEBUG nova.virt.hardware [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.154816] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6836f1-db37-4ebf-8c26-ed4f55284004 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.163693] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba582741-8493-4be5-9737-fcf4e85c00bf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.265706] env[61974]: INFO nova.compute.manager [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Took 38.86 seconds to build instance. [ 998.357382] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f17700-b95a-6729-45dc-29c4417ed358, 'name': SearchDatastore_Task, 'duration_secs': 0.039885} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.361217] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebc9b196-ed78-48b8-aa59-3bbf1e83e483 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.367293] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 998.367293] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b7a004-fdc4-2bcc-dd1c-4e57bddc0ad2" [ 998.367293] env[61974]: _type = "Task" [ 998.367293] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.370690] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379272, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.379667] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b7a004-fdc4-2bcc-dd1c-4e57bddc0ad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.468768] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.468768] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe1051b9-7ede-40ca-8084-b251e54c078f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.476857] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 998.476857] env[61974]: value = "task-1379273" [ 998.476857] env[61974]: _type = "Task" [ 998.476857] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.488176] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.534694] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.535443] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 998.538215] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.452s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.538503] env[61974]: DEBUG nova.objects.instance [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lazy-loading 'resources' on Instance uuid 0ce75511-290c-4fea-9657-dfdd8d9efc4b {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.715681] env[61974]: DEBUG nova.compute.manager [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Received event network-vif-plugged-8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 998.716065] env[61974]: DEBUG oslo_concurrency.lockutils [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] Acquiring lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.716406] env[61974]: DEBUG oslo_concurrency.lockutils [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.716705] env[61974]: DEBUG oslo_concurrency.lockutils [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.716981] env[61974]: DEBUG nova.compute.manager [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] No waiting events found dispatching network-vif-plugged-8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 998.717275] env[61974]: WARNING nova.compute.manager [req-ea686d39-876e-4fba-9796-8f1560f76f82 req-6bae65ce-3ccb-409d-be07-1bde5a33bfa7 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Received unexpected event network-vif-plugged-8e4ec8ca-7d59-4a5f-af91-ca1c71946996 for instance with vm_state building and task_state spawning. 
[ 998.768648] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f5eadff-7c2b-4a32-97e7-811e50635b8f tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.373s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.829143] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 998.830173] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03213823-6675-4568-980c-25aca6dd04e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.837348] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk is in state: ready. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 998.837519] env[61974]: ERROR oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk due to incomplete transfer. [ 998.837763] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e7e940cb-6f5f-44fe-adae-f5b18c6be135 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.846010] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520c2650-a17d-8d4f-26ac-6dae9d0b109a/disk-0.vmdk. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 998.846266] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Uploaded image 7303d3bd-2aee-4964-855b-6068bc1100ed to the Glance image server {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 998.848750] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Destroying the VM {{(pid=61974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 998.849361] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-82a2168d-f21b-48c6-9a32-7e9082a6f1a3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.857268] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 998.857268] env[61974]: value = "task-1379274" [ 998.857268] env[61974]: _type = "Task" [ 998.857268] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.869742] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379272, 'name': ReconfigVM_Task, 'duration_secs': 0.749599} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.872959] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Reconfigured VM instance instance-00000057 to attach disk [datastore1] cc048c22-81e0-40fb-9a06-9b84a54e4891/cc048c22-81e0-40fb-9a06-9b84a54e4891.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.873749] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379274, 'name': Destroy_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.873996] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc809644-cc69-49b7-aae6-ae96ed47f631 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.884543] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b7a004-fdc4-2bcc-dd1c-4e57bddc0ad2, 'name': SearchDatastore_Task, 'duration_secs': 0.011656} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.885858] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.886146] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1a04b388-8739-4b46-a8e1-cd79835bcf48/1a04b388-8739-4b46-a8e1-cd79835bcf48.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.886478] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 998.886478] env[61974]: value = "task-1379275" [ 998.886478] env[61974]: _type = "Task" [ 998.886478] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.886673] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebe45c2f-eb58-4aa6-8646-0d7258c08b51 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.897674] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379275, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.899088] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 998.899088] env[61974]: value = "task-1379276" [ 998.899088] env[61974]: _type = "Task" [ 998.899088] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.908575] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379276, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.987995] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379273, 'name': PowerOffVM_Task, 'duration_secs': 0.304131} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.988294] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.988486] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 17 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 999.042222] env[61974]: DEBUG nova.compute.utils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.048262] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 999.048858] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.100981] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Successfully updated port: 8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.151800] env[61974]: DEBUG nova.policy [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 999.370615] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379274, 'name': Destroy_Task, 'duration_secs': 0.510744} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.370973] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Destroyed the VM [ 999.371335] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Deleting Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 999.371616] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8274855b-fc1c-4ceb-9152-0adef20f44b0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.382034] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b126c14f-32dc-44e3-a6fa-165027f2a95e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.385509] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 999.385509] env[61974]: value = "task-1379277" [ 999.385509] env[61974]: _type = "Task" [ 999.385509] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.396380] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fec7302-ee1a-465c-b0c3-0b89896efee2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.403396] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379277, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.411563] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379275, 'name': Rename_Task, 'duration_secs': 0.182163} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.440265] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.440680] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379276, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.441212] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19cf29d8-8771-4e29-875f-1f21bb31ee5d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.444397] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96c6d78-c422-4819-a0e1-541e5e97de40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.457393] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 999.457393] env[61974]: value = "task-1379278" [ 999.457393] env[61974]: _type = "Task" [ 999.457393] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.459252] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f65b92-2c22-478c-be90-9f809760e23c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.479487] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379278, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.491083] env[61974]: DEBUG nova.compute.provider_tree [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.495804] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 999.495804] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.495999] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 
{{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 999.496263] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.496263] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 999.496464] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 999.500118] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 999.500118] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 999.500118] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 999.500118] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 999.500118] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 999.504453] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceeffc64-2223-4edc-a03a-b291c68e6cdf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.518150] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Successfully created port: 1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.527537] env[61974]: DEBUG oslo_vmware.api [None 
req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 999.527537] env[61974]: value = "task-1379279" [ 999.527537] env[61974]: _type = "Task" [ 999.527537] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.538633] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379279, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.550026] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 999.605685] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.605952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquired lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.606103] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.897320] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379277, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.914880] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379276, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634477} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.915279] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 1a04b388-8739-4b46-a8e1-cd79835bcf48/1a04b388-8739-4b46-a8e1-cd79835bcf48.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.915536] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.915658] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b170e3ab-ba99-4ebb-a035-976a6b095298 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.923887] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 999.923887] env[61974]: value = "task-1379280" [ 999.923887] env[61974]: _type = "Task" [ 999.923887] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.934399] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379280, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.975279] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379278, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.998142] env[61974]: DEBUG nova.scheduler.client.report [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1000.039766] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379279, 'name': ReconfigVM_Task, 'duration_secs': 0.336095} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.040300] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 33 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.153826] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.348422] env[61974]: DEBUG nova.network.neutron [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Updating instance_info_cache with network_info: [{"id": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "address": "fa:16:3e:e8:41:ba", "network": {"id": "4af74cdc-c170-4a7e-a83c-f7192dc90a8e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-108067414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09232b7456d94a96acefecf6098bd274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e4ec8ca-7d", "ovs_interfaceid": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.398817] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379277, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.438120] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079888} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.438120] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.438120] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e331286-d46d-426f-893a-26c50028df78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.465565] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 1a04b388-8739-4b46-a8e1-cd79835bcf48/1a04b388-8739-4b46-a8e1-cd79835bcf48.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.466176] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bbd718f-55b1-42e9-8865-819ab617496c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.494389] env[61974]: DEBUG oslo_vmware.api [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379278, 'name': PowerOnVM_Task, 'duration_secs': 0.549834} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.496746] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.497258] env[61974]: INFO nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1000.497590] env[61974]: DEBUG nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1000.498137] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1000.498137] env[61974]: value = "task-1379281" [ 1000.498137] env[61974]: _type = "Task" [ 1000.498137] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.499652] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29881bbc-9eb7-4e02-9bc3-beffb4f2b4aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.504448] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.512237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.067s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.512802] env[61974]: DEBUG nova.objects.instance [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'resources' on Instance uuid 90f8acb1-a0b5-4459-a9d7-c12f652b0b51 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.526612] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379281, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.529492] env[61974]: INFO nova.scheduler.client.report [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Deleted allocations for instance 0ce75511-290c-4fea-9657-dfdd8d9efc4b [ 1000.549905] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.550938] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.550938] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.550938] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.551115] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.551220] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.551399] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.551546] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.552021] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.552021] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.552227] env[61974]: DEBUG nova.virt.hardware [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.557533] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfiguring VM instance instance-00000053 to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1000.558690] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1000.560811] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18594e75-6e8c-4af2-8162-074f5a4803f4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.584104] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1000.584104] env[61974]: value = "task-1379282" [ 1000.584104] env[61974]: _type = "Task" [ 1000.584104] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.593937] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379282, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.603070] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.603344] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.603509] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.603698] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.603851] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.604081] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.604373] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.604527] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.604787] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 
tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.604944] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.605195] env[61974]: DEBUG nova.virt.hardware [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.606133] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2c4b63-a989-400c-b63b-ebd1db97bc38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.614916] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b89a94-6d98-41ed-a867-64a21521cacd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.851502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Releasing lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.851764] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Instance network_info: |[{"id": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "address": "fa:16:3e:e8:41:ba", "network": {"id": "4af74cdc-c170-4a7e-a83c-f7192dc90a8e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-108067414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09232b7456d94a96acefecf6098bd274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e4ec8ca-7d", "ovs_interfaceid": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1000.852247] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 
tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:41:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e4ec8ca-7d59-4a5f-af91-ca1c71946996', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.877089] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Creating folder: Project (09232b7456d94a96acefecf6098bd274). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1000.877089] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e7c6361-3dc4-4de4-a40b-6891991c96d9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.878923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "6e698472-b4c0-45dc-869d-d51bbe00552c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.879048] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.882552] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Created folder: Project (09232b7456d94a96acefecf6098bd274) in parent group-v292912. [ 1000.882672] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Creating folder: Instances. Parent ref: group-v293007. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1000.883342] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f17e0dbd-7bd7-496f-90fe-25c28484ad58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.893987] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Created folder: Instances in parent group-v293007. [ 1000.896043] env[61974]: DEBUG oslo.service.loopingcall [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.896043] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.896043] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2255e1d7-dea8-4c61-9052-de425ce6eccd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.914589] env[61974]: DEBUG oslo_vmware.api [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379277, 'name': RemoveSnapshot_Task, 'duration_secs': 1.243535} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.915211] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Deleted Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1000.915552] env[61974]: INFO nova.compute.manager [None req-ac3de77c-0b8d-4779-acb7-6093602cbafc tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Took 15.86 seconds to snapshot the instance on the hypervisor. [ 1000.922094] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.922094] env[61974]: value = "task-1379285" [ 1000.922094] env[61974]: _type = "Task" [ 1000.922094] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.931882] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379285, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.960251] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Received event network-changed-8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1000.960251] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Refreshing instance network info cache due to event network-changed-8e4ec8ca-7d59-4a5f-af91-ca1c71946996. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1000.960251] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquiring lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.960251] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquired lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.961087] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Refreshing network info cache for port 8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.018096] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.037270] env[61974]: INFO nova.compute.manager [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Took 24.36 seconds to build instance. [ 1001.040604] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b682e3f0-b894-4e94-a3dc-221a4812ad31 tempest-ServerShowV247Test-184757694 tempest-ServerShowV247Test-184757694-project-member] Lock "0ce75511-290c-4fea-9657-dfdd8d9efc4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.719s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.095246] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379282, 'name': ReconfigVM_Task, 'duration_secs': 0.333941} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.095246] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfigured VM instance instance-00000053 to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1001.095246] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bbbaa6-2495-4864-a020-d70fa56d9818 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.121746] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.124658] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3c8dc48-dba9-4f18-8af8-6ca7c179b7aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.146851] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1001.146851] env[61974]: value = "task-1379286" [ 1001.146851] env[61974]: _type = "Task" [ 1001.146851] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.158215] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379286, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.172956] env[61974]: DEBUG nova.compute.manager [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.173247] env[61974]: DEBUG nova.compute.manager [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing instance network info cache due to event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1001.173688] env[61974]: DEBUG oslo_concurrency.lockutils [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.173905] env[61974]: DEBUG oslo_concurrency.lockutils [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.174104] env[61974]: DEBUG nova.network.neutron [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.278980] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Successfully updated port: 1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.355889] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378b1baa-1c05-4a8f-b762-8b9b2381d16a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.364987] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f7172d-928e-470c-bee4-ece3d225b24e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.400993] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1001.404519] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4320e4-0e4f-415b-98e4-aaf4861dced5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.413762] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b511d0ca-4f6c-4794-997a-f22ff408b7fa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.431229] env[61974]: DEBUG nova.compute.provider_tree [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.442983] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379285, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.514608] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379281, 'name': ReconfigVM_Task, 'duration_secs': 0.71322} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.514867] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 1a04b388-8739-4b46-a8e1-cd79835bcf48/1a04b388-8739-4b46-a8e1-cd79835bcf48.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.515536] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f49b0e53-e1f1-4db1-94ee-9b96f2c93154 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.523416] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1001.523416] env[61974]: value = "task-1379287" [ 1001.523416] env[61974]: _type = "Task" [ 1001.523416] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.532163] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379287, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.539021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a359fbe1-78b1-4e23-9407-4b3b45d13afc tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.870s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.667978] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.740579] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Updated VIF entry in instance network info cache for port 8e4ec8ca-7d59-4a5f-af91-ca1c71946996. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.741055] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Updating instance_info_cache with network_info: [{"id": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "address": "fa:16:3e:e8:41:ba", "network": {"id": "4af74cdc-c170-4a7e-a83c-f7192dc90a8e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-108067414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09232b7456d94a96acefecf6098bd274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e4ec8ca-7d", "ovs_interfaceid": "8e4ec8ca-7d59-4a5f-af91-ca1c71946996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.789911] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.790096] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.790273] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.936942] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.938142] env[61974]: DEBUG nova.scheduler.client.report [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.947414] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379285, 'name': CreateVM_Task, 'duration_secs': 0.813032} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.947796] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1001.952444] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.952642] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.952985] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1001.953393] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfa8f901-cbd1-4076-a480-dce347f158d7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.960162] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1001.960162] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52dc223e-5d4e-6145-ba51-15977b0b5c93" [ 1001.960162] env[61974]: _type = "Task" [ 1001.960162] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.970645] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dc223e-5d4e-6145-ba51-15977b0b5c93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.980511] env[61974]: DEBUG nova.network.neutron [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updated VIF entry in instance network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.981253] env[61974]: DEBUG nova.network.neutron [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.038693] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379287, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.160623] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379286, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.244251] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Releasing lock "refresh_cache-ef17d87d-31ae-4d08-afba-157521e7d1e3" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.244562] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.244747] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-a342d02a-7577-428c-946f-e5725112ceec. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1002.245034] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.245210] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.245385] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.325339] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.449354] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.451418] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.643s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.452904] env[61974]: INFO nova.compute.claims [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.471100] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dc223e-5d4e-6145-ba51-15977b0b5c93, 'name': SearchDatastore_Task, 'duration_secs': 0.025827} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.471888] env[61974]: INFO nova.scheduler.client.report [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted allocations for instance 90f8acb1-a0b5-4459-a9d7-c12f652b0b51 [ 1002.478927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.478927] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.478927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.478927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 
tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.478927] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.478927] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-079e0c05-3079-43c4-a354-1750626040a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.483838] env[61974]: DEBUG oslo_concurrency.lockutils [req-20338532-be6b-4d9e-89e9-d9cf9440451e req-01ae6bb4-e2d1-4689-8f80-468886427510 service nova] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.499796] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.499796] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.499796] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6768389e-e07d-4913-84ef-d26dd06284f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.505686] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1002.505686] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a5b90c-64d5-2e86-dfdf-d55da4a650c9" [ 1002.505686] env[61974]: _type = "Task" [ 1002.505686] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.516106] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a5b90c-64d5-2e86-dfdf-d55da4a650c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.517156] env[61974]: DEBUG nova.network.neutron [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Updating instance_info_cache with network_info: [{"id": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "address": "fa:16:3e:5d:37:62", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8be3ef-cc", "ovs_interfaceid": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.535947] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379287, 'name': Rename_Task, 'duration_secs': 1.012055} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.536272] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.536531] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa10b1df-adc8-43a2-9e4d-82fffd897248 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.544155] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1002.544155] env[61974]: value = "task-1379288" [ 1002.544155] env[61974]: _type = "Task" [ 1002.544155] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.553852] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379288, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.647287] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.647591] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.647833] env[61974]: INFO nova.compute.manager [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Shelving [ 1002.659726] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.939777] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.940165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.940614] env[61974]: DEBUG nova.objects.instance [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'flavor' on Instance uuid ceb0dd02-6441-4923-99f6-73f8eab86fe5 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.979258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-07f3d5c4-f769-4f57-80b7-8af5fe8d7dd6 tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "90f8acb1-a0b5-4459-a9d7-c12f652b0b51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.748s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.023201] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.023553] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Instance network_info: |[{"id": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "address": "fa:16:3e:5d:37:62", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8be3ef-cc", "ovs_interfaceid": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1003.023916] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a5b90c-64d5-2e86-dfdf-d55da4a650c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009998} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.024324] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:37:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d8be3ef-cc07-4962-8443-8b4f3bce14ce', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.032042] env[61974]: DEBUG oslo.service.loopingcall [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.033794] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port a342d02a-7577-428c-946f-e5725112ceec. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.034277] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.035901] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.036240] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f34798e-cf5b-447b-991e-4d165b7375d8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.038746] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9b8992c-a549-4f05-9ce9-8bf2ddcac378 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.059566] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1003.059566] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523229c8-41ab-c54e-d3c2-e2bc8ec60195" [ 1003.059566] env[61974]: _type = "Task" [ 1003.059566] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.067279] env[61974]: DEBUG oslo_vmware.api [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379288, 'name': PowerOnVM_Task, 'duration_secs': 0.504421} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.067477] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.067477] env[61974]: value = "task-1379289" [ 1003.067477] env[61974]: _type = "Task" [ 1003.067477] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.068141] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.068368] env[61974]: INFO nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Took 9.76 seconds to spawn the instance on the hypervisor. [ 1003.068557] env[61974]: DEBUG nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1003.069534] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2452568d-2f4f-4b9c-a405-a63c260d23e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.077923] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523229c8-41ab-c54e-d3c2-e2bc8ec60195, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.087768] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379289, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.159453] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.159762] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3ab7e28-9bbc-407c-8f3a-1548e2682b62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.165057] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379286, 'name': ReconfigVM_Task, 'duration_secs': 1.637433} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.165751] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Reconfigured VM instance instance-00000053 to attach disk [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c/abe0168a-e838-468a-a223-7c2a64497c0c.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.166101] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 50 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1003.171473] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1003.171473] env[61974]: value = "task-1379290" [ 1003.171473] env[61974]: _type = "Task" [ 1003.171473] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.182094] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379290, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.235829] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.235829] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-a342d02a-7577-428c-946f-e5725112ceec. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1003.236029] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.540859] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.540859] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.540859] env[61974]: DEBUG nova.compute.manager [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing instance network info cache due to event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1003.540859] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.540859] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.540859] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.542361] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.542463] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.547651] env[61974]: DEBUG nova.objects.instance [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'pci_requests' on Instance uuid ceb0dd02-6441-4923-99f6-73f8eab86fe5 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.573381] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523229c8-41ab-c54e-d3c2-e2bc8ec60195, 'name': SearchDatastore_Task, 'duration_secs': 0.020244} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.576473] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.576798] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] ef17d87d-31ae-4d08-afba-157521e7d1e3/ef17d87d-31ae-4d08-afba-157521e7d1e3.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.580148] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7552f133-d2ae-48ae-b6b3-9350c15aeca5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.587309] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379289, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.593191] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1003.593191] env[61974]: value = "task-1379291" [ 1003.593191] env[61974]: _type = "Task" [ 1003.593191] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.605415] env[61974]: INFO nova.compute.manager [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Took 20.63 seconds to build instance. [ 1003.612298] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.675186] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46eb33b0-853e-4d20-91e5-c59b31eeca73 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.701417] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379290, 'name': PowerOffVM_Task, 'duration_secs': 0.309957} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.704206] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.705064] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a92c54e-87c3-47c5-8bb0-5ed4e2dd2fde {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.708245] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c804a9-6491-4535-95d6-d36ddd4fe443 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.710708] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "68ad5903-e502-406b-a19e-9e4c28aa5035" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.711761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.711761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.711761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.711761] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.714648] env[61974]: INFO nova.compute.manager [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 
tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Terminating instance [ 1003.717028] env[61974]: DEBUG nova.compute.manager [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1003.717213] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.731407] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4118401-9936-4273-9c56-7e6c75d3cb6f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.749822] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dd27e8-edc8-4fc6-b72b-3a399a4dc994 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.752455] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 67 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1003.761441] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.763408] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4355351-e187-4941-9eb8-33442660f5e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.768291] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c03a47c-c5c6-4fc0-893f-cabcb06e38ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.777401] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddc444c-aa91-485c-8a91-34bd983f806a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.781067] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 1003.781067] env[61974]: value = "task-1379292" [ 1003.781067] env[61974]: _type = "Task" [ 1003.781067] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.821018] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f08d294-fc03-4fb0-8a1c-62d711bd5ed6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.827152] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379292, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.833303] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b3366f-c1b2-4815-8a60-053c5f5cb5a2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.848482] env[61974]: DEBUG nova.compute.provider_tree [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.053294] env[61974]: DEBUG nova.objects.base [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1004.053679] env[61974]: DEBUG nova.network.neutron [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.084295] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379289, 'name': CreateVM_Task, 'duration_secs': 0.606651} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.084530] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.085409] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.085632] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.086093] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.086470] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-778c2a0a-a465-4c80-a144-1f8f3c90e64a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.093341] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1004.093341] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52afb4db-0809-ad6b-e291-23b0d6b2e3b5" [ 1004.093341] env[61974]: _type = "Task" [ 1004.093341] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.110801] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fdb54fc8-3a33-43a3-a364-d3690e7044b7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.144s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.111167] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afb4db-0809-ad6b-e291-23b0d6b2e3b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.120615] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379291, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.126392] env[61974]: DEBUG nova.compute.manager [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Received event network-changed-f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1004.126445] env[61974]: DEBUG nova.compute.manager [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Refreshing instance network info cache due to event network-changed-f32df777-3ba7-47f1-9845-8327f4f53fe8. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1004.126765] env[61974]: DEBUG oslo_concurrency.lockutils [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] Acquiring lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.126874] env[61974]: DEBUG oslo_concurrency.lockutils [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] Acquired lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.127167] env[61974]: DEBUG nova.network.neutron [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Refreshing network info cache for port f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.198332] env[61974]: DEBUG nova.policy [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1004.273485] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Creating Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1004.274015] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e3e7c735-7692-40a5-bffa-1682d73f57a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.290151] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1004.290151] env[61974]: value = "task-1379293" [ 1004.290151] 
env[61974]: _type = "Task" [ 1004.290151] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.301430] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379292, 'name': PowerOffVM_Task, 'duration_secs': 0.284052} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.302499] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.302723] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.302995] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62bb9ddb-b859-4894-9991-6e18768de35f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.308460] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379293, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.310497] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updated VIF entry in instance network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.310874] env[61974]: DEBUG nova.network.neutron [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.351811] env[61974]: DEBUG nova.scheduler.client.report [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.356815] env[61974]: DEBUG nova.network.neutron [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Port f2f9e10a-4e37-47fa-8040-638e6376acc6 binding to destination host cpu-1 is already ACTIVE {{(pid=61974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1004.379631] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.380240] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.380965] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleting the datastore file [datastore2] 68ad5903-e502-406b-a19e-9e4c28aa5035 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.380965] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a63a430-4c52-4b35-9036-aef502d376f2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.389468] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 1004.389468] env[61974]: value = "task-1379295" [ 1004.389468] env[61974]: _type = "Task" [ 1004.389468] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.399670] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.568798] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.569284] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.608290] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afb4db-0809-ad6b-e291-23b0d6b2e3b5, 'name': SearchDatastore_Task, 'duration_secs': 0.063898} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.609112] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.609404] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.609666] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.609858] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.610266] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.610431] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e969e6b8-847c-4974-b5cb-5f7552584a9d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.615570] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661953} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.616136] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] ef17d87d-31ae-4d08-afba-157521e7d1e3/ef17d87d-31ae-4d08-afba-157521e7d1e3.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.616584] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.616935] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e68684fd-f0a2-48bd-af26-3f90c7027f60 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.627854] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1004.627854] env[61974]: value = "task-1379296" [ 1004.627854] env[61974]: _type = "Task" [ 1004.627854] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.627854] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.627854] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.627854] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b158c244-77fe-4a41-a404-1a3a7356e6ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.638128] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1004.638128] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52502133-0e1b-3fee-cb22-3475d5f6c21c" [ 1004.638128] env[61974]: _type = "Task" [ 1004.638128] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.641762] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.651445] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52502133-0e1b-3fee-cb22-3475d5f6c21c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.806433] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379293, 'name': CreateSnapshot_Task, 'duration_secs': 0.442248} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.806762] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Created Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1004.807565] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c106e2d-2d62-4bcd-9a4f-8128ec59fa41 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.816511] env[61974]: DEBUG oslo_concurrency.lockutils [req-ddd244cf-9581-4c4b-9be7-336d37b883c7 req-844c396c-4a90-44d3-88cc-505440e47bba service nova] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.862180] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.862820] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1004.872026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.975s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.872026] env[61974]: DEBUG nova.objects.instance [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'resources' on Instance uuid e6feee04-8aae-4151-8187-3ef4885bcf73 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.900030] env[61974]: DEBUG oslo_vmware.api [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346729} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.900502] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.900754] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.901760] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.901760] env[61974]: INFO nova.compute.manager [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1004.901760] env[61974]: DEBUG oslo.service.loopingcall [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.901760] env[61974]: DEBUG nova.compute.manager [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1004.901760] env[61974]: DEBUG nova.network.neutron [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1005.072111] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.072417] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Received event network-vif-plugged-1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.072622] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Acquiring lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.072853] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.073029] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.073351] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] No waiting events found dispatching network-vif-plugged-1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1005.073389] env[61974]: WARNING nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Received unexpected event network-vif-plugged-1d8be3ef-cc07-4962-8443-8b4f3bce14ce for instance with vm_state building and task_state spawning. 
[ 1005.073537] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Received event network-changed-1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.073698] env[61974]: DEBUG nova.compute.manager [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Refreshing instance network info cache due to event network-changed-1d8be3ef-cc07-4962-8443-8b4f3bce14ce. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1005.073889] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Acquiring lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.074040] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Acquired lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.074209] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Refreshing network info cache for port 1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.086034] env[61974]: DEBUG nova.network.neutron [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updated VIF entry in instance network info cache for port f32df777-3ba7-47f1-9845-8327f4f53fe8. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.086404] env[61974]: DEBUG nova.network.neutron [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updating instance_info_cache with network_info: [{"id": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "address": "fa:16:3e:91:1f:af", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf32df777-3b", "ovs_interfaceid": "f32df777-3ba7-47f1-9845-8327f4f53fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.136787] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069299} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.137081] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.137895] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948609fe-b2e7-41f0-8a85-1f87b2a49c7e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.162367] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] ef17d87d-31ae-4d08-afba-157521e7d1e3/ef17d87d-31ae-4d08-afba-157521e7d1e3.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.163097] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4bd7d53-19c5-43da-b88e-f3d183c2e80f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.181952] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52502133-0e1b-3fee-cb22-3475d5f6c21c, 'name': SearchDatastore_Task, 'duration_secs': 0.012592} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.183182] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f4b7792-21b7-4e32-a437-817fdc6b2997 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.189469] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1005.189469] env[61974]: value = "task-1379297" [ 1005.189469] env[61974]: _type = "Task" [ 1005.189469] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.190547] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1005.190547] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528812e5-2f39-a0b2-2a2b-4d3657c7286c" [ 1005.190547] env[61974]: _type = "Task" [ 1005.190547] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.201828] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379297, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.204977] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528812e5-2f39-a0b2-2a2b-4d3657c7286c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.302725] env[61974]: DEBUG nova.compute.manager [req-d27540af-288e-45b3-b93e-63818f8330f8 req-840c6d18-325b-4d3a-be7c-1d3990211045 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Received event network-vif-deleted-f70a964e-a247-4cd3-a6b1-d308d7d7cb92 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.302986] env[61974]: INFO nova.compute.manager [req-d27540af-288e-45b3-b93e-63818f8330f8 req-840c6d18-325b-4d3a-be7c-1d3990211045 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Neutron deleted interface f70a964e-a247-4cd3-a6b1-d308d7d7cb92; detaching it from the instance and deleting it from the info cache [ 1005.303230] env[61974]: DEBUG nova.network.neutron [req-d27540af-288e-45b3-b93e-63818f8330f8 req-840c6d18-325b-4d3a-be7c-1d3990211045 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.328799] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Creating linked-clone VM from snapshot {{(pid=61974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1005.329386] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-08417746-a201-4b6a-a228-be740d86991b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.339246] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1005.339246] env[61974]: value = "task-1379298" [ 1005.339246] env[61974]: _type = "Task" [ 1005.339246] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.348341] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379298, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.371660] env[61974]: DEBUG nova.compute.utils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1005.383655] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1005.383841] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1005.393562] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.393562] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.393753] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.467366] env[61974]: DEBUG nova.policy [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4896588cebd84071a573046de7006429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db6af28263c40708c2466226ce03009', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1005.589330] env[61974]: DEBUG oslo_concurrency.lockutils [req-3007b11d-fb34-41c0-924b-00e94f20d9c1 req-ae9bbf4d-a705-4f21-a553-da05bff07ab8 service nova] Releasing lock "refresh_cache-1a04b388-8739-4b46-a8e1-cd79835bcf48" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.664370] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83ad6e1-ec8e-4cff-8585-e70d7bcbbec9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.674340] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109dcb51-1bc5-48c5-9b68-7987b4c173e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.717560] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eaa975d-948c-4cd3-a735-ef6215746b2a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.732769] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ab644d-4978-4d2c-ba10-802715b2d689 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.737245] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528812e5-2f39-a0b2-2a2b-4d3657c7286c, 'name': SearchDatastore_Task, 'duration_secs': 0.03559} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.737496] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379297, 'name': ReconfigVM_Task, 'duration_secs': 0.508452} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.739944] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.740233] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 603bcf2a-fc99-4ba4-b757-c37d93554870/603bcf2a-fc99-4ba4-b757-c37d93554870.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.740558] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Reconfigured VM instance instance-00000059 to attach disk [datastore2] ef17d87d-31ae-4d08-afba-157521e7d1e3/ef17d87d-31ae-4d08-afba-157521e7d1e3.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.741629] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95dd04ba-c39a-46d9-9fca-673ea3a12814 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.743669] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-341f2892-aae4-4682-b0d6-f3e97574f1aa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.753694] env[61974]: DEBUG nova.compute.provider_tree [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.762239] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1005.762239] env[61974]: value = "task-1379300" [ 1005.762239] env[61974]: _type = "Task" [ 1005.762239] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.763389] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1005.763389] env[61974]: value = "task-1379299" [ 1005.763389] env[61974]: _type = "Task" [ 1005.763389] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.782110] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379299, 'name': Rename_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.782460] env[61974]: DEBUG nova.network.neutron [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.783750] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.806768] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c44d58d-adae-4bcf-b07f-71d0558e0436 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.819460] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79f1237-8882-4dae-9559-7413c0b17189 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.849878] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379298, 'name': CloneVM_Task} progress is 94%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.861991] env[61974]: DEBUG nova.compute.manager [req-d27540af-288e-45b3-b93e-63818f8330f8 req-840c6d18-325b-4d3a-be7c-1d3990211045 service nova] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Detach interface failed, port_id=f70a964e-a247-4cd3-a6b1-d308d7d7cb92, reason: Instance 68ad5903-e502-406b-a19e-9e4c28aa5035 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1005.882764] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1005.931399] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Successfully created port: e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.385764] env[61974]: DEBUG nova.scheduler.client.report [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497867} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.385764] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 603bcf2a-fc99-4ba4-b757-c37d93554870/603bcf2a-fc99-4ba4-b757-c37d93554870.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.385764] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd4a79f4-fa8a-4315-8c24-87d747726436 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.385764] env[61974]: INFO nova.compute.manager [-] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Took 1.39 seconds to deallocate network for instance. [ 1006.385764] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379299, 'name': Rename_Task, 'duration_secs': 0.179821} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.385764] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4dcbc2bc-076a-490d-9fba-a4a5e5bd048f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1006.385764] env[61974]: value = "task-1379301" [ 1006.385764] env[61974]: _type = "Task" [ 1006.385764] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1006.385764] env[61974]: value = "task-1379302" [ 1006.385764] env[61974]: _type = "Task" [ 1006.385764] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.385764] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379301, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.385764] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Updated VIF entry in instance network info cache for port 1d8be3ef-cc07-4962-8443-8b4f3bce14ce. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.391421] env[61974]: DEBUG nova.network.neutron [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Updating instance_info_cache with network_info: [{"id": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "address": "fa:16:3e:5d:37:62", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8be3ef-cc", "ovs_interfaceid": "1d8be3ef-cc07-4962-8443-8b4f3bce14ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.391421] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379302, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.391421] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379298, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.491106] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.491314] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.491604] env[61974]: DEBUG nova.network.neutron [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.763292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.893s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.766190] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.829s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.767756] env[61974]: INFO nova.compute.claims [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.774262] env[61974]: DEBUG nova.network.neutron [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Successfully updated port: 915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1006.801822] env[61974]: INFO nova.scheduler.client.report [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted allocations for instance e6feee04-8aae-4151-8187-3ef4885bcf73 [ 1006.805930] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.819813] env[61974]: DEBUG oslo_vmware.api [None 
req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379301, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075954} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.820458] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1006.821247] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c049481b-0c3b-45b3-8edb-c912bae4faa7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.828009] env[61974]: DEBUG oslo_concurrency.lockutils [req-43deeabc-b83d-40a9-922d-e70a5be877c3 req-94a9b20e-8c26-45e9-9716-b0a175b3171b service nova] Releasing lock "refresh_cache-603bcf2a-fc99-4ba4-b757-c37d93554870" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.828256] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379302, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.850196] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 603bcf2a-fc99-4ba4-b757-c37d93554870/603bcf2a-fc99-4ba4-b757-c37d93554870.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.853516] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfb648cc-0d5e-4e4c-8a4e-3026c46d219a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.875492] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379298, 'name': CloneVM_Task, 'duration_secs': 1.430247} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.876989] env[61974]: INFO nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Created linked-clone VM from snapshot [ 1006.877424] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1006.877424] env[61974]: value = "task-1379303" [ 1006.877424] env[61974]: _type = "Task" [ 1006.877424] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.878378] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a42d2cf-b45f-4685-b624-b33bda50e6c9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.889793] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Uploading image a500e9bf-1653-49ec-a28f-1c976c4ead03 {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1006.895733] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1006.897608] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379303, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.924123] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.924420] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.924602] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.924810] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.924968] env[61974]: DEBUG 
nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.925154] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.925430] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.925650] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.925876] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.926133] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.926370] env[61974]: DEBUG nova.virt.hardware [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.928748] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae0e731-fd7f-40df-bce8-e3b6e3b5cd66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.939697] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1006.939697] env[61974]: value = "vm-293012" [ 1006.939697] env[61974]: _type = "VirtualMachine" [ 1006.939697] env[61974]: }. 
{{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1006.940938] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3da27ce-e92c-486b-8dbc-43a41b591b4d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.945240] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5899ed65-809c-4bfb-8b8c-28991b6e62ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.960198] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lease: (returnval){ [ 1006.960198] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528bbd78-f0dd-95b6-61b9-e96c89a695ff" [ 1006.960198] env[61974]: _type = "HttpNfcLease" [ 1006.960198] env[61974]: } obtained for exporting VM: (result){ [ 1006.960198] env[61974]: value = "vm-293012" [ 1006.960198] env[61974]: _type = "VirtualMachine" [ 1006.960198] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1006.960534] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the lease: (returnval){ [ 1006.960534] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528bbd78-f0dd-95b6-61b9-e96c89a695ff" [ 1006.960534] env[61974]: _type = "HttpNfcLease" [ 1006.960534] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1006.968954] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1006.968954] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528bbd78-f0dd-95b6-61b9-e96c89a695ff" [ 1006.968954] env[61974]: _type = "HttpNfcLease" [ 1006.968954] env[61974]: } is initializing. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1007.255079] env[61974]: DEBUG nova.network.neutron [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance_info_cache with network_info: [{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.275811] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.276021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.276208] env[61974]: DEBUG nova.network.neutron [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.320065] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c7102bfb-51a0-4a27-8400-c972befd81ef tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "e6feee04-8aae-4151-8187-3ef4885bcf73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.533s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.327784] env[61974]: DEBUG oslo_vmware.api [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379302, 'name': PowerOnVM_Task, 
'duration_secs': 0.972855} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.328077] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.328293] env[61974]: INFO nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Took 9.20 seconds to spawn the instance on the hypervisor. [ 1007.328482] env[61974]: DEBUG nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1007.330292] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06ede45-b0e7-43ab-816b-0d7c979984c5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.333785] env[61974]: DEBUG nova.compute.manager [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.333995] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.334220] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.334394] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.334566] env[61974]: DEBUG nova.compute.manager [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] No waiting events found dispatching network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.334734] env[61974]: WARNING nova.compute.manager [req-62481634-9f22-4bad-86d5-f1e685f76f44 
req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received unexpected event network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 for instance with vm_state active and task_state None. [ 1007.334901] env[61974]: DEBUG nova.compute.manager [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.335077] env[61974]: DEBUG nova.compute.manager [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-915eddb2-5b76-46da-8c84-a99ed89ca777. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1007.335277] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.392200] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379303, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.469417] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1007.469417] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528bbd78-f0dd-95b6-61b9-e96c89a695ff" [ 1007.469417] env[61974]: _type = "HttpNfcLease" [ 1007.469417] env[61974]: } is ready. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1007.469814] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1007.469814] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528bbd78-f0dd-95b6-61b9-e96c89a695ff" [ 1007.469814] env[61974]: _type = "HttpNfcLease" [ 1007.469814] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1007.470560] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7ba102-82ba-4b60-96bc-9279cb46409b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.477742] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk from lease info. 
{{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1007.477924] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk for reading. {{(pid=61974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1007.587413] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-015a320d-ca61-404f-a424-e340b7c538b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.758327] env[61974]: DEBUG oslo_concurrency.lockutils [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.842628] env[61974]: WARNING nova.network.neutron [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] a3cca736-a69c-4d05-a3fd-386cf3c4bee5 already exists in list: networks containing: ['a3cca736-a69c-4d05-a3fd-386cf3c4bee5']. ignoring it [ 1007.863671] env[61974]: INFO nova.compute.manager [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Took 21.25 seconds to build instance. [ 1007.894373] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379303, 'name': ReconfigVM_Task, 'duration_secs': 0.753022} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.897378] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 603bcf2a-fc99-4ba4-b757-c37d93554870/603bcf2a-fc99-4ba4-b757-c37d93554870.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.899576] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40ef9d3e-1a69-4c7c-9a45-08e70eaca4b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.907445] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1007.907445] env[61974]: value = "task-1379305" [ 1007.907445] env[61974]: _type = "Task" [ 1007.907445] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.922882] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379305, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.090923] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06fadb3-0438-4cc2-9622-0e60cf9b42c3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.102309] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a3e28f-9f17-415a-89ea-b1b127f748ab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.144288] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59757db-ff0a-4455-8846-131460678d40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.154225] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4900a5f7-8c7f-49d6-90db-fd7926a8a9f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.176959] env[61974]: DEBUG nova.compute.provider_tree [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.283724] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62e9ffa-6242-4cd0-a822-346c781bd8c7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.309276] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37d8077-aef0-43c6-b1e4-f703b8f4448b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.317657] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 83 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1008.369850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-acea5eeb-6136-45ec-b78b-f9c5cf6300f0 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.764s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.420529] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: 
{'id': task-1379305, 'name': Rename_Task, 'duration_secs': 0.182795} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.420937] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.421169] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7051e385-6488-4dbe-8dc7-6e9d02fba5df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.429878] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1008.429878] env[61974]: value = "task-1379306" [ 1008.429878] env[61974]: _type = "Task" [ 1008.429878] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.439877] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.681899] env[61974]: DEBUG nova.scheduler.client.report [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.716041] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Successfully updated port: e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.771420] env[61974]: DEBUG nova.network.neutron [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, 
"meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "915eddb2-5b76-46da-8c84-a99ed89ca777", "address": "fa:16:3e:d3:e1:45", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap915eddb2-5b", "ovs_interfaceid": "915eddb2-5b76-46da-8c84-a99ed89ca777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.824078] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.824427] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83fffee7-23d9-4136-8ac4-4fb97d08d6d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.834447] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1008.834447] env[61974]: value = "task-1379307" [ 1008.834447] env[61974]: _type = "Task" [ 1008.834447] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.843866] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379307, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.945393] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379306, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.188588] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.188855] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1009.192030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.386s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.192486] env[61974]: DEBUG nova.objects.instance [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'resources' on Instance uuid 68ad5903-e502-406b-a19e-9e4c28aa5035 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.218603] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.218787] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.218972] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.274320] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1009.275160] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.275330] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.275721] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.275968] env[61974]: DEBUG nova.network.neutron [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port 915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1009.277921] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e694fb-d566-4a5d-af21-327d904210d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.305641] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.306101] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.306389] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.306765] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.306946] env[61974]: DEBUG nova.virt.hardware 
[None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.307270] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.307491] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.307730] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.308027] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.308312] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.308584] env[61974]: DEBUG nova.virt.hardware [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.319127] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfiguring VM to attach interface {{(pid=61974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1009.320172] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b0b552d-5e6a-4fbc-b29b-ab15581909f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.342074] env[61974]: DEBUG oslo_vmware.api [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1009.342074] env[61974]: value = "task-1379308" [ 1009.342074] env[61974]: _type = "Task" [ 1009.342074] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.346523] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379307, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.356203] env[61974]: DEBUG oslo_vmware.api [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379308, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.443475] env[61974]: DEBUG oslo_vmware.api [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379306, 'name': PowerOnVM_Task, 'duration_secs': 0.639579} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.443893] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.443893] env[61974]: INFO nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Took 8.88 seconds to spawn the instance on the hypervisor. 
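The power-on sequence above follows the standard oslo.vmware task pattern: Nova invokes VirtualMachine.PowerOnVM_Task, receives a task reference, and wait_for_task polls it until vCenter reports success or error (the "progress is N%" lines in between). A minimal hand-rolled sketch of that polling loop, assuming a session object with a get_task_info helper (both names are illustrative stand-ins, not the exact oslo.vmware internals):

    import time

    class TaskFailed(Exception):
        """Raised when a vCenter task finishes in the 'error' state."""

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the task's info until vCenter marks it finished.
        # oslo_vmware.api.VMwareAPISession.wait_for_task drives the same
        # loop from a looping-call timer instead of time.sleep().
        while True:
            info = session.get_task_info(task_ref)   # assumed helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' or 'running': keep waiting, which is what produces
            # the repeated "progress is N%" entries in this trace.
            time.sleep(poll_interval)

The real implementation also logs the task id and duration_secs on completion, which is where the "completed successfully" entries above come from.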
[ 1009.444014] env[61974]: DEBUG nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1009.444803] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c32f32-7391-4979-9921-214edf2c518d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.507045] env[61974]: DEBUG nova.compute.manager [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Received event network-vif-plugged-e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.507382] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.507524] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.507708] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.507888] env[61974]: DEBUG nova.compute.manager [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] No waiting events found dispatching network-vif-plugged-e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1009.508095] env[61974]: WARNING nova.compute.manager [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Received unexpected event network-vif-plugged-e3d592f0-8ee9-4b5c-9397-cf3da1294c61 for instance with vm_state building and task_state spawning. 
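The WARNING above comes from Nova's external-event plumbing: when Neutron reports network-vif-plugged, the compute manager pops a matching waiter registered by the spawn path (under the per-instance "<uuid>-events" lock) and signals it; if nothing has been registered yet, the event is logged as unexpected. A toy sketch of that handshake, with simplified names (InstanceEvents, prepare, pop here are illustrative, not Nova's exact API):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy registry behind pop_instance_event(): spawn registers the
        events it expects, the Neutron callback pops and signals them, and
        an event nobody registered for is treated as 'unexpected'."""

        def __init__(self):
            self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock
            self._waiters = defaultdict(dict)  # instance uuid -> {event name: Event}

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def handle_external_event(registry, instance_uuid, event_name):
        waiter = registry.pop(instance_uuid, event_name)
        if waiter is None:
            # Matches the WARNING above: the event arrived before (or without)
            # anyone waiting on it, e.g. while the instance is still building.
            print("Received unexpected event %s for %s" % (event_name, instance_uuid))
        else:
            waiter.set()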
[ 1009.508275] env[61974]: DEBUG nova.compute.manager [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Received event network-changed-e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.508440] env[61974]: DEBUG nova.compute.manager [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Refreshing instance network info cache due to event network-changed-e3d592f0-8ee9-4b5c-9397-cf3da1294c61. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1009.508706] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Acquiring lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.695493] env[61974]: DEBUG nova.compute.utils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.700253] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1009.700626] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1009.762431] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.775028] env[61974]: DEBUG nova.policy [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5f6d80a0784b1f8ff2b2fcfbb44232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40e43abf62a5464091aa725e1cff2b50', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1009.849382] env[61974]: DEBUG oslo_vmware.api [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379307, 'name': PowerOnVM_Task, 'duration_secs': 0.536532} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.854724] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.854946] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-973f8cf3-56eb-410f-b90b-57b4e3fda571 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance 'abe0168a-e838-468a-a223-7c2a64497c0c' progress to 100 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1009.869556] env[61974]: DEBUG oslo_vmware.api [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379308, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.966888] env[61974]: INFO nova.compute.manager [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Took 19.87 seconds to build instance. [ 1009.983673] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e16493-9403-4d8b-9170-25641e8d23ce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.995048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16b0ccb-f479-4a02-b0d3-d84199eafc9a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.030737] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8ac864-1ec9-4fbf-acc3-05c4ea51f849 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.042498] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82386e10-fe07-477e-bf02-06347f7ec293 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.058970] env[61974]: DEBUG nova.compute.provider_tree [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.073194] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.073463] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.073701] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.074050] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.074288] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.076448] env[61974]: INFO nova.compute.manager [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Terminating instance [ 1010.078389] env[61974]: DEBUG nova.compute.manager [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1010.078642] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.079458] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758d84e9-519a-4c8c-b45d-2c77b395649d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.090458] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1010.093221] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3be5ac64-7c50-4383-8ca3-edbfe5485e49 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.104566] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1010.104566] env[61974]: value = "task-1379309" [ 1010.104566] env[61974]: _type = "Task" [ 1010.104566] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.116169] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379309, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.165480] env[61974]: DEBUG nova.network.neutron [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Updating instance_info_cache with network_info: [{"id": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "address": "fa:16:3e:b5:a5:e5", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3d592f0-8e", "ovs_interfaceid": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.200766] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1010.346984] env[61974]: DEBUG nova.network.neutron [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port 915eddb2-5b76-46da-8c84-a99ed89ca777. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.347525] env[61974]: DEBUG nova.network.neutron [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "915eddb2-5b76-46da-8c84-a99ed89ca777", "address": "fa:16:3e:d3:e1:45", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap915eddb2-5b", "ovs_interfaceid": "915eddb2-5b76-46da-8c84-a99ed89ca777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.360113] env[61974]: DEBUG oslo_vmware.api [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379308, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.463154] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Successfully created port: 49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.472432] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0737e79f-b287-44d8-8ebd-e72b2bc471fc tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.387s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.562571] env[61974]: DEBUG nova.scheduler.client.report [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.614888] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379309, 'name': PowerOffVM_Task, 'duration_secs': 0.274492} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.615212] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.615427] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.615708] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6dae92e-e357-47c5-934f-9d34a54894d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.669860] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.670278] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance network_info: |[{"id": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "address": "fa:16:3e:b5:a5:e5", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3d592f0-8e", "ovs_interfaceid": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1010.670649] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Acquired lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.670898] env[61974]: DEBUG nova.network.neutron [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: 
a2fbbc4a-92da-4917-a73e-a37a8980c62c] Refreshing network info cache for port e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.672645] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:a5:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3d592f0-8ee9-4b5c-9397-cf3da1294c61', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.682189] env[61974]: DEBUG oslo.service.loopingcall [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.683341] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.683656] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62f480a2-fffc-405b-8aa8-c0b0f2744e68 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.703541] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.704598] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.704598] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Deleting the datastore file [datastore2] ef17d87d-31ae-4d08-afba-157521e7d1e3 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.704937] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8b6ac43-1e0d-4a63-9598-c3e54568f52f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.712905] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.712905] env[61974]: value = "task-1379311" [ 1010.712905] env[61974]: _type = "Task" [ 1010.712905] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.718246] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for the task: (returnval){ [ 1010.718246] env[61974]: value = "task-1379312" [ 1010.718246] env[61974]: _type = "Task" [ 1010.718246] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.727802] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379311, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.734218] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379312, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.850910] env[61974]: DEBUG oslo_concurrency.lockutils [req-62481634-9f22-4bad-86d5-f1e685f76f44 req-b35af476-c97a-414d-a15a-ef543ea62901 service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.863128] env[61974]: DEBUG oslo_vmware.api [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379308, 'name': ReconfigVM_Task, 'duration_secs': 1.023422} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.863534] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.863868] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfigured VM to attach interface {{(pid=61974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1011.067940] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.096508] env[61974]: INFO nova.scheduler.client.report [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted allocations for instance 68ad5903-e502-406b-a19e-9e4c28aa5035 [ 1011.213346] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1011.235306] env[61974]: DEBUG oslo_vmware.api [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Task: {'id': task-1379312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215582} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.235478] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379311, 'name': CreateVM_Task, 'duration_secs': 0.383398} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.235575] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.235778] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.235924] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.236127] env[61974]: INFO nova.compute.manager [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1011.236378] env[61974]: DEBUG oslo.service.loopingcall [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.236523] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.236759] env[61974]: DEBUG nova.compute.manager [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.236870] env[61974]: DEBUG nova.network.neutron [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.238884] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.239068] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.239381] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.241652] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='73042eb31719b2b628f5d80aeba8199c',container_format='bare',created_at=2024-10-29T21:01:00Z,direct_url=,disk_format='vmdk',id=7303d3bd-2aee-4964-855b-6068bc1100ed,min_disk=1,min_ram=0,name='tempest-test-snap-246722088',owner='40e43abf62a5464091aa725e1cff2b50',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-29T21:01:15Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.241876] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.242053] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image limits 0:0:0 {{(pid=61974) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.242246] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.242399] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.242560] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.242758] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.242947] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.243165] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.243343] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.243523] env[61974]: DEBUG nova.virt.hardware [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.244159] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e454e15-bb62-4500-8b59-5ac0b88dff47 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.246070] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd36d40d-e80d-430d-93f6-f54f4a37a124 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.255637] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df057e24-3f85-4d25-a529-4edb17508784 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.259632] env[61974]: DEBUG oslo_vmware.api 
[None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1011.259632] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]526b78b5-0799-1f39-b88c-5e4c619d963b" [ 1011.259632] env[61974]: _type = "Task" [ 1011.259632] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.277276] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]526b78b5-0799-1f39-b88c-5e4c619d963b, 'name': SearchDatastore_Task, 'duration_secs': 0.02231} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.277575] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.277811] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.278149] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.278344] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.278570] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.278880] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-019bcacf-9c1d-4367-91f1-169c76e422df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.291646] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.291881] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.292657] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6410c36-98d5-485b-aec9-23ee3117a2af {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.298010] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1011.298010] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52595e2a-2ee5-6360-591d-e25578957e08" [ 1011.298010] env[61974]: _type = "Task" [ 1011.298010] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.306163] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52595e2a-2ee5-6360-591d-e25578957e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.368453] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99cfb288-a5dd-4989-91c5-8419955d9ef7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.428s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.605455] env[61974]: DEBUG oslo_concurrency.lockutils [None req-afc4bc3f-7cd9-4880-b642-fec97dc82b4e tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "68ad5903-e502-406b-a19e-9e4c28aa5035" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.894s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.644723] env[61974]: DEBUG nova.network.neutron [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Updated VIF entry in instance network info cache for port e3d592f0-8ee9-4b5c-9397-cf3da1294c61. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.645161] env[61974]: DEBUG nova.network.neutron [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Updating instance_info_cache with network_info: [{"id": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "address": "fa:16:3e:b5:a5:e5", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3d592f0-8e", "ovs_interfaceid": "e3d592f0-8ee9-4b5c-9397-cf3da1294c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.815218] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52595e2a-2ee5-6360-591d-e25578957e08, 'name': SearchDatastore_Task, 'duration_secs': 0.038251} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.816759] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f90d8b90-5aa3-4e43-9c1e-6ef162cf51cd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.824154] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1011.824154] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52aba7a1-e3c6-5aea-5861-93777b18a342" [ 1011.824154] env[61974]: _type = "Task" [ 1011.824154] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.836556] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52aba7a1-e3c6-5aea-5861-93777b18a342, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.148616] env[61974]: DEBUG oslo_concurrency.lockutils [req-e7633de6-a752-4243-8984-e2162bef1779 req-de047af9-9b91-4183-a832-1e7dfb092e2c service nova] Releasing lock "refresh_cache-a2fbbc4a-92da-4917-a73e-a37a8980c62c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.221609] env[61974]: DEBUG nova.compute.manager [req-ba15a7f9-fbd9-4b34-a492-710cc1bc8b37 req-ed707e8b-fcc7-4509-ad4a-ab1cd2bd59a3 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Received event network-vif-deleted-8e4ec8ca-7d59-4a5f-af91-ca1c71946996 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.221865] env[61974]: INFO nova.compute.manager [req-ba15a7f9-fbd9-4b34-a492-710cc1bc8b37 req-ed707e8b-fcc7-4509-ad4a-ab1cd2bd59a3 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Neutron deleted interface 8e4ec8ca-7d59-4a5f-af91-ca1c71946996; detaching it from the instance and deleting it from the info cache [ 1012.222171] env[61974]: DEBUG nova.network.neutron [req-ba15a7f9-fbd9-4b34-a492-710cc1bc8b37 req-ed707e8b-fcc7-4509-ad4a-ab1cd2bd59a3 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.242908] env[61974]: DEBUG nova.network.neutron [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.341466] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52aba7a1-e3c6-5aea-5861-93777b18a342, 'name': SearchDatastore_Task, 'duration_secs': 0.029065} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.341802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.342116] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.342441] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1e6a259-dbd7-4b2c-9e78-d701967642d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.352566] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1012.352566] env[61974]: value = "task-1379313" [ 1012.352566] env[61974]: _type = "Task" [ 1012.352566] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.363944] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379313, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.394827] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "912ff104-9c97-4486-99c8-71a35180abb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.395101] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.401308] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Successfully updated port: 49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.724764] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80c5f1e7-d1fa-4941-afdd-6881ecdd3b87 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.735876] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c33c92-8304-472b-b10b-dfb3bbd92517 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.747565] env[61974]: INFO nova.compute.manager [-] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Took 1.51 seconds to deallocate network for instance. [ 1012.780548] env[61974]: DEBUG nova.compute.manager [req-ba15a7f9-fbd9-4b34-a492-710cc1bc8b37 req-ed707e8b-fcc7-4509-ad4a-ab1cd2bd59a3 service nova] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Detach interface failed, port_id=8e4ec8ca-7d59-4a5f-af91-ca1c71946996, reason: Instance ef17d87d-31ae-4d08-afba-157521e7d1e3 could not be found. 
{{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1012.814810] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "450956aa-cc55-481c-acf6-287abc8b8efe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.815185] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.815469] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.815792] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.816119] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.818934] env[61974]: INFO nova.compute.manager [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Terminating instance [ 1012.821644] env[61974]: DEBUG nova.compute.manager [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1012.821890] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.822849] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad911a32-5024-4258-9b64-95e46378a561 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.836214] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.836598] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66065f52-2b6f-4875-b8d0-119cf6176a8e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.846495] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 1012.846495] env[61974]: value = "task-1379314" [ 1012.846495] env[61974]: _type = "Task" [ 1012.846495] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.862656] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379314, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.869334] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379313, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.898177] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1012.903751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.903916] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.904081] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.997633] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.998150] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.998543] env[61974]: DEBUG nova.compute.manager [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Going to confirm migration 1 {{(pid=61974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1013.044033] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.044220] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.254620] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 
tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.254909] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.255283] env[61974]: DEBUG nova.objects.instance [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lazy-loading 'resources' on Instance uuid ef17d87d-31ae-4d08-afba-157521e7d1e3 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.357096] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379314, 'name': PowerOffVM_Task, 'duration_secs': 0.360646} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.360289] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.360472] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.360741] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ff5dfd1-5cf4-4528-aaed-6db683929497 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.367595] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719251} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.367853] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.368096] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.368352] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f32028-3f47-4fd3-abdc-bb3b15930ceb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.375754] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1013.375754] env[61974]: value = "task-1379316" [ 1013.375754] env[61974]: _type = "Task" [ 1013.375754] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.385442] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379316, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.423399] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.441462] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1013.445539] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1013.445902] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1013.446150] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleting the datastore file [datastore1] 450956aa-cc55-481c-acf6-287abc8b8efe {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1013.446418] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9156096e-3ec8-4792-8477-691109f8688e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.454557] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for the task: (returnval){ [ 1013.454557] env[61974]: value = "task-1379317" [ 1013.454557] env[61974]: _type = "Task" [ 1013.454557] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.464306] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379317, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.548205] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.548205] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.548516] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7248955e-b82e-4b6d-aaf8-7535f361b72d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.569103] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dcb884-1cd4-489e-afb7-675e16531d51 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.602234] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfiguring VM to detach interface {{(pid=61974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1013.604280] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.604280] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.604280] env[61974]: DEBUG nova.network.neutron [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.604280] env[61974]: DEBUG nova.objects.instance [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'info_cache' on Instance uuid abe0168a-e838-468a-a223-7c2a64497c0c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.607898] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b7b52b8-d05d-4cdd-9b44-e29c259e8d02 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.634358] env[61974]: 
DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1013.634358] env[61974]: value = "task-1379318" [ 1013.634358] env[61974]: _type = "Task" [ 1013.634358] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.645509] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.747775] env[61974]: DEBUG nova.network.neutron [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Updating instance_info_cache with network_info: [{"id": "49aba758-8fda-480f-9179-23a891374764", "address": "fa:16:3e:56:78:a4", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49aba758-8f", "ovs_interfaceid": "49aba758-8fda-480f-9179-23a891374764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.889406] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076558} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.889744] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.890618] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fcc6ea-af6b-4dcc-9efd-ed693d9cc0e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.913932] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.916939] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f93cda4c-a963-40e8-918f-3330ccc39ce9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.939403] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1013.939403] env[61974]: value = "task-1379319" [ 1013.939403] env[61974]: _type = "Task" [ 1013.939403] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.949410] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.965332] env[61974]: DEBUG oslo_vmware.api [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Task: {'id': task-1379317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378598} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.965679] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1013.965875] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1013.966086] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1013.966371] env[61974]: INFO nova.compute.manager [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1013.966746] env[61974]: DEBUG oslo.service.loopingcall [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.966972] env[61974]: DEBUG nova.compute.manager [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1013.967090] env[61974]: DEBUG nova.network.neutron [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1014.059606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afdbdda-3ae4-485e-b883-dd26599f4281 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.068550] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d1eace-bf78-4cbf-8688-2995dd92d4fe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.101044] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e733db-b283-484b-a6c1-39fc3da5657c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.110252] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b7f8a0-a4f6-4a39-b234-3c94d412f576 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.125107] env[61974]: DEBUG nova.compute.provider_tree [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.145778] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.250964] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.251256] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Instance network_info: |[{"id": "49aba758-8fda-480f-9179-23a891374764", "address": "fa:16:3e:56:78:a4", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49aba758-8f", "ovs_interfaceid": "49aba758-8fda-480f-9179-23a891374764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1014.251703] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:78:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49aba758-8fda-480f-9179-23a891374764', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.260527] env[61974]: DEBUG oslo.service.loopingcall [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1014.263245] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.263553] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1be1acbf-4695-409d-b6ae-446fa825eeb8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.287547] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.287547] env[61974]: value = "task-1379320" [ 1014.287547] env[61974]: _type = "Task" [ 1014.287547] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.296585] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379320, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.329995] env[61974]: DEBUG nova.compute.manager [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Received event network-vif-plugged-49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.330272] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Acquiring lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.330406] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.330620] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.330936] env[61974]: DEBUG nova.compute.manager [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] No waiting events found dispatching network-vif-plugged-49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1014.331728] env[61974]: WARNING nova.compute.manager [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Received unexpected event network-vif-plugged-49aba758-8fda-480f-9179-23a891374764 for instance with vm_state building and task_state spawning. 
[ 1014.331728] env[61974]: DEBUG nova.compute.manager [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Received event network-changed-49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.331728] env[61974]: DEBUG nova.compute.manager [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Refreshing instance network info cache due to event network-changed-49aba758-8fda-480f-9179-23a891374764. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1014.331728] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Acquiring lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.331859] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Acquired lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.332017] env[61974]: DEBUG nova.network.neutron [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Refreshing network info cache for port 49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.451799] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379319, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.510760] env[61974]: DEBUG nova.network.neutron [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance_info_cache with network_info: [{"id": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "address": "fa:16:3e:50:e7:ee", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f9e10a-4e", "ovs_interfaceid": "f2f9e10a-4e37-47fa-8040-638e6376acc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.628202] env[61974]: DEBUG nova.scheduler.client.report [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.647275] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.801244] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379320, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.859600] env[61974]: DEBUG nova.network.neutron [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.953101] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379319, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.013292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-abe0168a-e838-468a-a223-7c2a64497c0c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.013630] env[61974]: DEBUG nova.objects.instance [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'migration_context' on Instance uuid abe0168a-e838-468a-a223-7c2a64497c0c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.121286] env[61974]: DEBUG nova.network.neutron [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Updated VIF entry in instance network info cache for port 49aba758-8fda-480f-9179-23a891374764. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.121686] env[61974]: DEBUG nova.network.neutron [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Updating instance_info_cache with network_info: [{"id": "49aba758-8fda-480f-9179-23a891374764", "address": "fa:16:3e:56:78:a4", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49aba758-8f", "ovs_interfaceid": "49aba758-8fda-480f-9179-23a891374764", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.133902] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 
tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.136856] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.713s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.137824] env[61974]: INFO nova.compute.claims [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.150885] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.159609] env[61974]: INFO nova.scheduler.client.report [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Deleted allocations for instance ef17d87d-31ae-4d08-afba-157521e7d1e3 [ 1015.300146] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379320, 'name': CreateVM_Task, 'duration_secs': 0.548427} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.300350] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.301391] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.301391] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.301787] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1015.302110] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9334e943-1264-42b2-9b27-f69cc9496d14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.307622] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1015.307622] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522c5e2e-d8cf-fedf-ffce-6829f3a6ab51" [ 1015.307622] env[61974]: _type = "Task" [ 1015.307622] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.318205] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522c5e2e-d8cf-fedf-ffce-6829f3a6ab51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.362229] env[61974]: INFO nova.compute.manager [-] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Took 1.39 seconds to deallocate network for instance. [ 1015.452633] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379319, 'name': ReconfigVM_Task, 'duration_secs': 1.474653} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.453069] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Reconfigured VM instance instance-0000005b to attach disk [datastore2] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.453621] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afd02211-4207-4cc6-888e-3da24dd49dfd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.461842] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1015.461842] env[61974]: value = "task-1379321" [ 1015.461842] env[61974]: _type = "Task" [ 1015.461842] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.471390] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379321, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.516486] env[61974]: DEBUG nova.objects.base [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1015.517774] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aebfa7-bca1-49fb-b848-282fffecc2e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.539726] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c27fd7e1-8bb6-411c-8397-8fc10716775d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.546381] env[61974]: DEBUG oslo_vmware.api [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1015.546381] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52bd9903-50a9-dc1a-607f-3451eedb87a6" [ 1015.546381] env[61974]: _type = "Task" [ 1015.546381] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.557248] env[61974]: DEBUG oslo_vmware.api [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bd9903-50a9-dc1a-607f-3451eedb87a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.624715] env[61974]: DEBUG oslo_concurrency.lockutils [req-58288e44-751b-450c-a2c2-d30f74a54067 req-7d8942d3-e685-4e1c-b0f2-f911e1ac3bc7 service nova] Releasing lock "refresh_cache-6e698472-b4c0-45dc-869d-d51bbe00552c" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.662688] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.666676] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8781514c-a1cd-4ec2-8240-430bfb6fa220 tempest-InstanceActionsNegativeTestJSON-37384560 tempest-InstanceActionsNegativeTestJSON-37384560-project-member] Lock "ef17d87d-31ae-4d08-afba-157521e7d1e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.593s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.824461] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.824836] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Processing image 7303d3bd-2aee-4964-855b-6068bc1100ed {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.825028] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.825191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.825376] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.825638] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-548ae425-dcbd-4884-8fc5-d4478ad7b335 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.836729] env[61974]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.836990] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.838611] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e52bef0-06e1-4c59-98f5-0f8901139a3c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.848082] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1015.848082] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f1cc36-148a-0f94-3244-4eb5f9d7d8c6" [ 1015.848082] env[61974]: _type = "Task" [ 1015.848082] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.857838] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f1cc36-148a-0f94-3244-4eb5f9d7d8c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.869342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.972535] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379321, 'name': Rename_Task, 'duration_secs': 0.158649} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.972829] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.973109] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a77c5330-80a0-42de-aea4-bf32277e4e0f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.980812] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1015.980812] env[61974]: value = "task-1379322" [ 1015.980812] env[61974]: _type = "Task" [ 1015.980812] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.989711] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379322, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.058503] env[61974]: DEBUG oslo_vmware.api [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bd9903-50a9-dc1a-607f-3451eedb87a6, 'name': SearchDatastore_Task, 'duration_secs': 0.01007} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.058901] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.159326] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.364490] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Preparing fetch location {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1016.364877] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Fetch image to [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5/OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5.vmdk {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1016.365114] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Downloading stream optimized image 7303d3bd-2aee-4964-855b-6068bc1100ed to [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5/OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5.vmdk on the data store datastore2 as vApp {{(pid=61974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1016.365320] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Downloading image file data 7303d3bd-2aee-4964-855b-6068bc1100ed to the ESX as VM named 'OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5' {{(pid=61974) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1016.391312] env[61974]: DEBUG nova.compute.manager [req-353d5e2f-6249-4c33-9ab6-4ef78d256501 req-b18982a0-1207-4f33-89aa-59d27bc38c2e service nova] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Received event network-vif-deleted-23ed5afc-e506-4637-9fdd-6a2630023f66 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.434479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c299f2b8-d7c6-4ce0-8b7d-c8b6109e14a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.445892] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8127926-0eac-4f27-8eaa-688c457e4ae3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.481763] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1016.481763] env[61974]: value = "resgroup-9" [ 1016.481763] env[61974]: _type = "ResourcePool" [ 1016.481763] env[61974]: }. 
{{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1016.482629] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fba107-5744-4edf-8629-4eab0bf86375 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.485543] env[61974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f4fb9edc-1078-43b0-beec-bc1a74edbafb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.513415] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79631b75-0358-4f2e-bd94-bfe80bafd291 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.518026] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379322, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.519152] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease: (returnval){ [ 1016.519152] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1016.519152] env[61974]: _type = "HttpNfcLease" [ 1016.519152] env[61974]: } obtained for vApp import into resource pool (val){ [ 1016.519152] env[61974]: value = "resgroup-9" [ 1016.519152] env[61974]: _type = "ResourcePool" [ 1016.519152] env[61974]: }. {{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1016.519632] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the lease: (returnval){ [ 1016.519632] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1016.519632] env[61974]: _type = "HttpNfcLease" [ 1016.519632] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1016.531757] env[61974]: DEBUG nova.compute.provider_tree [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.537344] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1016.537344] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1016.537344] env[61974]: _type = "HttpNfcLease" [ 1016.537344] env[61974]: } is initializing. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1016.656029] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.821282] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1016.822504] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3e41b3-6be9-49ff-9be6-1a38493058b3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.832034] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk is in state: ready. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1016.832034] env[61974]: ERROR oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk due to incomplete transfer. [ 1016.832034] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e55db0c7-3d08-4106-9ed8-b44f14dc5570 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.842044] env[61974]: DEBUG oslo_vmware.rw_handles [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bdf98e-9040-8f2b-59c9-7286adcc8a56/disk-0.vmdk. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1016.843682] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Uploaded image a500e9bf-1653-49ec-a28f-1c976c4ead03 to the Glance image server {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1016.847073] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Destroying the VM {{(pid=61974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1016.849169] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a71efa78-de40-44fa-9289-278bd18525f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.856039] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1016.856039] env[61974]: value = "task-1379324" [ 1016.856039] env[61974]: _type = "Task" [ 1016.856039] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.865365] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379324, 'name': Destroy_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.996776] env[61974]: DEBUG oslo_vmware.api [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379322, 'name': PowerOnVM_Task, 'duration_secs': 0.702178} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.997147] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.997418] env[61974]: INFO nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Took 10.10 seconds to spawn the instance on the hypervisor. 
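[editor's note] The surrounding entries trace an HttpNfcLease lifecycle end to end: a lease is requested for the vApp import into the resource pool, polled while "initializing" until "ready", the VMDK write URL is read from the lease info, data is streamed while HttpNfcLeaseProgress keeps the lease alive, and the lease is then released; the read lease a few entries earlier is instead aborted "due to incomplete transfer" via HttpNfcLeaseAbort. A hedged sketch of that state machine follows, with a hypothetical lease object and write handle standing in for the real VIM calls named in the log (HttpNfcLeaseProgress, HttpNfcLeaseComplete, HttpNfcLeaseAbort).

import time

def upload_via_lease(lease, chunks, total_size, poll_interval=1.0):
    # Wait for the import lease to leave the "initializing" state.
    while lease.state() == 'initializing':           # hypothetical accessor
        time.sleep(poll_interval)
    if lease.state() != 'ready':
        raise RuntimeError(f"lease is in unexpected state {lease.state()!r}")

    written = 0
    handle = lease.open_vmdk_write_handle()          # hypothetical: HTTP handle for the disk-0.vmdk URL
    try:
        for chunk in chunks:
            handle.write(chunk)
            written += len(chunk)
            # Periodic progress reports keep the lease from timing out (HttpNfcLeaseProgress).
            lease.progress(int(written * 100 / max(total_size, 1)))
        handle.close()
        lease.complete()                             # HttpNfcLeaseComplete on success
    except Exception:
        # Mirrors the "Aborting lease ... due to incomplete transfer" entry (HttpNfcLeaseAbort).
        lease.abort()
        raise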
[ 1016.997646] env[61974]: DEBUG nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1016.999759] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fc9fbc-cc34-467b-af0b-57fe3c33f3ed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.030977] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1017.030977] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1017.030977] env[61974]: _type = "HttpNfcLease" [ 1017.030977] env[61974]: } is initializing. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1017.035233] env[61974]: DEBUG nova.scheduler.client.report [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.153974] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.367539] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379324, 'name': Destroy_Task} progress is 33%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.522035] env[61974]: INFO nova.compute.manager [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Took 22.73 seconds to build instance. [ 1017.532808] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1017.532808] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1017.532808] env[61974]: _type = "HttpNfcLease" [ 1017.532808] env[61974]: } is ready. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1017.533249] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1017.533249] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5231e714-eb95-4c04-ed86-867c2f1d4504" [ 1017.533249] env[61974]: _type = "HttpNfcLease" [ 1017.533249] env[61974]: }. {{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1017.533948] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89672998-8a40-4f5a-a287-791e58322541 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.543509] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.544111] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1017.546883] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk from lease info. {{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1017.547126] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk. 
{{(pid=61974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1017.549325] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.680s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.549597] env[61974]: DEBUG nova.objects.instance [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lazy-loading 'resources' on Instance uuid 450956aa-cc55-481c-acf6-287abc8b8efe {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.617856] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d3701b03-4a60-4e3f-b794-1d775de1b175 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.655411] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.867574] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379324, 'name': Destroy_Task, 'duration_secs': 0.872471} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.867958] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Destroyed the VM [ 1017.868143] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Deleting Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1017.868409] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8aabe8c0-b6fd-43b5-86ab-0b4aa466ecd3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.876740] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1017.876740] env[61974]: value = "task-1379325" [ 1017.876740] env[61974]: _type = "Task" [ 1017.876740] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.885976] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379325, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.021947] env[61974]: DEBUG oslo_concurrency.lockutils [None req-06825258-2d23-44fe-af04-067abd364c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.244s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.051105] env[61974]: DEBUG nova.compute.utils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1018.054174] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1018.054378] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1018.130239] env[61974]: DEBUG nova.policy [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1018.160773] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.378980] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33086c6a-92e2-4068-9a8e-ec2f5c4cd0f0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.404557] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5720a818-b982-4e34-96f3-bafa98f7f30f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.408123] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379325, 'name': RemoveSnapshot_Task, 'duration_secs': 0.442714} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.410853] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Deleted Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1018.411288] env[61974]: DEBUG nova.compute.manager [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1018.412483] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726a5583-f46a-4bbf-8b57-1788d47e398b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.450749] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0225ab-537a-4fcf-9ec4-ea285d6b0d0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.470019] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91116bfb-c98e-4efe-9477-917c9bee2ba8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.486291] env[61974]: DEBUG nova.compute.provider_tree [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.558637] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Successfully created port: eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.561839] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 
912ff104-9c97-4486-99c8-71a35180abb0] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1018.593067] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Completed reading data from the image iterator. {{(pid=61974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1018.593382] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1018.599188] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27022f5-d438-4981-b24a-c35827f2b188 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.606212] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk is in state: ready. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1018.606463] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1018.606811] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e607df62-926d-468a-9165-58cdbbbe84e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.656420] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.959080] env[61974]: DEBUG oslo_vmware.rw_handles [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52df4255-14fb-8d3d-6c41-6b908549f704/disk-0.vmdk. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1018.959410] env[61974]: INFO nova.virt.vmwareapi.images [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Downloaded image file data 7303d3bd-2aee-4964-855b-6068bc1100ed [ 1018.960244] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4e19d4-1b9e-4082-aea3-11f0afe9cf34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.968659] env[61974]: INFO nova.compute.manager [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Shelve offloading [ 1018.970447] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.970726] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c2f32d5-5d4e-4c7a-b1eb-3749d5353b5e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.987109] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38812b40-9a67-4398-97d9-ef97d662b746 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.988809] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1018.988809] env[61974]: value = "task-1379326" [ 1018.988809] env[61974]: _type = "Task" [ 1018.988809] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.989672] env[61974]: DEBUG nova.scheduler.client.report [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.002555] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] VM already powered off {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1019.002783] env[61974]: DEBUG nova.compute.manager [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1019.003603] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603a1537-72d8-429f-a395-a7db14c562e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.010960] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.011178] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.011362] env[61974]: DEBUG nova.network.neutron [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.035434] env[61974]: INFO nova.virt.vmwareapi.images [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] The imported VM was unregistered [ 1019.038556] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Caching image {{(pid=61974) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1019.038835] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating directory with path [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.040329] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1d355d4-a8d3-4729-a2a2-ff3d021d4a55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.065150] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created directory with path [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.065405] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5/OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5.vmdk to [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk. {{(pid=61974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1019.069242] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5cb3002f-8866-4170-86f6-ee7866327d5e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.079647] env[61974]: DEBUG oslo_concurrency.lockutils [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.079914] env[61974]: DEBUG oslo_concurrency.lockutils [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.080118] env[61974]: DEBUG nova.compute.manager [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1019.082514] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d4f5c7-fcea-47ec-acc1-457033e67197 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.085366] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting 
for the task: (returnval){ [ 1019.085366] env[61974]: value = "task-1379328" [ 1019.085366] env[61974]: _type = "Task" [ 1019.085366] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.091266] env[61974]: DEBUG nova.compute.manager [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1019.091893] env[61974]: DEBUG nova.objects.instance [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'flavor' on Instance uuid a2fbbc4a-92da-4917-a73e-a37a8980c62c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.099468] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.156767] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.494966] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.496822] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.438s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.530428] env[61974]: INFO nova.scheduler.client.report [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Deleted allocations for instance 450956aa-cc55-481c-acf6-287abc8b8efe [ 1019.576687] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1019.597932] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.599804] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.600137] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a907c3a4-7186-452a-9a6f-3da315c0200a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.609232] env[61974]: DEBUG oslo_vmware.api [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1019.609232] env[61974]: value = "task-1379329" [ 1019.609232] env[61974]: _type = "Task" [ 1019.609232] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.611548] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.612239] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.612713] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.612713] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.612914] env[61974]: DEBUG nova.virt.hardware [None 
req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.613206] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.613569] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.613800] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.614039] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.614292] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.614584] env[61974]: DEBUG nova.virt.hardware [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.618750] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a52195-ffcd-44d7-b490-35dd5fa68db9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.632591] env[61974]: DEBUG oslo_vmware.api [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.633945] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d2ad64-f7ac-403c-921d-26e70be98c82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.659379] env[61974]: DEBUG oslo_vmware.api [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379318, 'name': ReconfigVM_Task, 'duration_secs': 5.802927} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.659750] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.660025] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Reconfigured VM to detach interface {{(pid=61974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1019.956765] env[61974]: DEBUG nova.network.neutron [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updating instance_info_cache with network_info: [{"id": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "address": "fa:16:3e:d7:6a:cc", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f7e5444-15", "ovs_interfaceid": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.038885] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0e9684aa-c307-48f3-871e-9b0b4dddd66c tempest-ListServerFiltersTestJSON-1849518156 tempest-ListServerFiltersTestJSON-1849518156-project-member] Lock "450956aa-cc55-481c-acf6-287abc8b8efe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.223s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.101210] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.128192] env[61974]: DEBUG oslo_vmware.api [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379329, 'name': PowerOffVM_Task, 'duration_secs': 0.374252} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.128192] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.128192] env[61974]: DEBUG nova.compute.manager [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1020.128192] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd434490-a4ce-46de-a758-99c9a140374e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.279515] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304333a4-9dda-4f52-bccb-77e0bdd4ff01 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.288326] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dbbdf1-ceae-4882-883d-d218171af514 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.323053] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ae27dd-d55f-488a-8985-956d5cb9c64d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.332896] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c035c3b4-d818-4bc0-a0dc-40c1da3d80c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.349794] env[61974]: DEBUG nova.compute.provider_tree [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.453784] env[61974]: DEBUG nova.compute.manager [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Received event network-vif-plugged-eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.453784] env[61974]: DEBUG oslo_concurrency.lockutils [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] Acquiring lock "912ff104-9c97-4486-99c8-71a35180abb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.453784] env[61974]: DEBUG oslo_concurrency.lockutils [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] Lock "912ff104-9c97-4486-99c8-71a35180abb0-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.454200] env[61974]: DEBUG oslo_concurrency.lockutils [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] Lock "912ff104-9c97-4486-99c8-71a35180abb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.454200] env[61974]: DEBUG nova.compute.manager [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] No waiting events found dispatching network-vif-plugged-eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1020.454304] env[61974]: WARNING nova.compute.manager [req-3fe0d6d8-e30c-4a9c-9430-e34f007e4427 req-4dcc1e9e-59b1-4921-9abf-54d0a3f321f0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Received unexpected event network-vif-plugged-eb8d6299-95d1-4112-8a74-4fc223060135 for instance with vm_state building and task_state spawning. [ 1020.460612] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.597985] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.647151] env[61974]: DEBUG oslo_concurrency.lockutils [None req-284cfd5a-b8e7-43a8-86ed-118c49201443 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.566s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.738226] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Successfully updated port: eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.853339] env[61974]: DEBUG nova.scheduler.client.report [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.962161] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.963171] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3147a1-b891-46b0-bb93-5cb85b0d30ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.975259] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.975600] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abf268d3-3062-431c-81d9-301cd8add7fa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.057903] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.058231] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Deleting contents of the 
VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.058344] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleting the datastore file [datastore1] cc048c22-81e0-40fb-9a06-9b84a54e4891 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.058626] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d22fdd15-962d-44be-829f-86da85b5805c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.067953] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1021.067953] env[61974]: value = "task-1379331" [ 1021.067953] env[61974]: _type = "Task" [ 1021.067953] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.078958] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.096822] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.240403] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.240860] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.240860] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1021.410211] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.410211] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.410656] env[61974]: DEBUG nova.network.neutron [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1021.579531] env[61974]: DEBUG oslo_vmware.api [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330625} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.579797] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.580101] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.580322] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.597748] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379328, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.506464} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.601024] env[61974]: INFO nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5/OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5.vmdk to [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk. [ 1021.601314] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Cleaning up location [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1021.601500] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c0e3663b-0fc7-4d14-97c9-3b60ee410be5 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.602146] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afae1ccf-bd27-4724-a5e4-ec01aa068447 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.609722] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1021.609722] env[61974]: value = "task-1379332" [ 1021.609722] env[61974]: _type = "Task" [ 1021.609722] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.610744] env[61974]: INFO nova.scheduler.client.report [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance cc048c22-81e0-40fb-9a06-9b84a54e4891 [ 1021.623122] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.777441] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.870818] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.374s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.947174] env[61974]: DEBUG nova.network.neutron [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Updating instance_info_cache with network_info: [{"id": "eb8d6299-95d1-4112-8a74-4fc223060135", "address": "fa:16:3e:a8:f1:32", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb8d6299-95", "ovs_interfaceid": "eb8d6299-95d1-4112-8a74-4fc223060135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.953077] env[61974]: INFO nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Rebuilding instance [ 1022.013075] env[61974]: DEBUG nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1022.014462] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510f6c1f-f678-467e-9c9b-821210028194 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.119254] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.119629] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.119701] env[61974]: DEBUG nova.objects.instance [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'resources' on Instance uuid cc048c22-81e0-40fb-9a06-9b84a54e4891 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.128967] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053585} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.129536] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.129780] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.130400] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk to [datastore2] 6e698472-b4c0-45dc-869d-d51bbe00552c/6e698472-b4c0-45dc-869d-d51bbe00552c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.132430] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99be90dc-9e3f-4302-aa70-bb2f5a2a12b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.145940] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1022.145940] env[61974]: value = "task-1379333" [ 1022.145940] env[61974]: _type = "Task" [ 1022.145940] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.159021] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.211301] env[61974]: INFO nova.network.neutron [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Port 915eddb2-5b76-46da-8c84-a99ed89ca777 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1022.211301] env[61974]: DEBUG nova.network.neutron [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.272853] env[61974]: DEBUG nova.compute.manager [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-changed-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.272853] env[61974]: DEBUG nova.compute.manager [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing instance network info cache due to event network-changed-a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1022.272853] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] Acquiring lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.433209] env[61974]: INFO nova.scheduler.client.report [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocation for migration 0d48f5de-f68d-4bff-830f-7e38a2227b8a [ 1022.451535] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.451879] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Instance network_info: |[{"id": "eb8d6299-95d1-4112-8a74-4fc223060135", "address": "fa:16:3e:a8:f1:32", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb8d6299-95", "ovs_interfaceid": "eb8d6299-95d1-4112-8a74-4fc223060135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1022.452548] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:f1:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb8d6299-95d1-4112-8a74-4fc223060135', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.462670] env[61974]: DEBUG oslo.service.loopingcall [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.462933] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1022.463615] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d067bc5-86fc-4815-adf8-ad370c1dbdc2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.484771] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.484771] env[61974]: value = "task-1379334" [ 1022.484771] env[61974]: _type = "Task" [ 1022.484771] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.495307] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379334, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.531195] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.532122] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-773b84b7-1256-40a6-9ff1-c8ab693096ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.540776] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1022.540776] env[61974]: value = "task-1379335" [ 1022.540776] env[61974]: _type = "Task" [ 1022.540776] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.551709] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.624924] env[61974]: DEBUG nova.objects.instance [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'numa_topology' on Instance uuid cc048c22-81e0-40fb-9a06-9b84a54e4891 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.627868] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Received event network-changed-eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.628086] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Refreshing instance network info cache due to event network-changed-eb8d6299-95d1-4112-8a74-4fc223060135. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1022.628318] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Acquiring lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.628473] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Acquired lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.628679] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Refreshing network info cache for port eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.665365] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.713985] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.716542] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] Acquired lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.716783] env[61974]: DEBUG nova.network.neutron [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Refreshing network info cache for port a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.941453] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0fc0ad71-b48b-4c3d-b1f0-fa628c6fc667 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.942s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.000152] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379334, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.019679] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.019679] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.020019] env[61974]: DEBUG nova.objects.instance [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'flavor' on Instance uuid 9c26e20b-dfc4-432c-a851-499dbea18f01 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.055975] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] VM already powered off {{(pid=61974) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1023.056608] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1023.058160] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada5e83f-36bd-4b54-9772-c235c2024474 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.068795] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.069447] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a3b0c78-7754-42a3-ae16-13b8a7697944 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.084117] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.130950] env[61974]: DEBUG nova.objects.base [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1023.161543] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.187848] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.188160] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.188406] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore2] a2fbbc4a-92da-4917-a73e-a37a8980c62c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.189104] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f313c510-cf52-4a4f-8852-189427f00840 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.198323] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1023.198323] env[61974]: value = "task-1379337" [ 1023.198323] env[61974]: _type = "Task" [ 1023.198323] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.208947] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.219773] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fda96bee-ca41-42de-bdac-b553873100e0 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-ceb0dd02-6441-4923-99f6-73f8eab86fe5-915eddb2-5b76-46da-8c84-a99ed89ca777" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.176s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.359445] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8202f7-54d5-480d-aebb-266fbe593655 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.368688] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd4c8d3-cf78-4c30-bf4f-0a6549711276 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.402879] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d01ad0-5844-459d-b65f-f15904df231d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.415032] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71827fe-c90b-4dd5-8489-6aa5230b9674 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.436117] env[61974]: DEBUG nova.compute.provider_tree [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.498489] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379334, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.661093] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.717438] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.805291] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Updated VIF entry in instance network info cache for port eb8d6299-95d1-4112-8a74-4fc223060135. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.805761] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Updating instance_info_cache with network_info: [{"id": "eb8d6299-95d1-4112-8a74-4fc223060135", "address": "fa:16:3e:a8:f1:32", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb8d6299-95", "ovs_interfaceid": "eb8d6299-95d1-4112-8a74-4fc223060135", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.810085] env[61974]: DEBUG nova.objects.instance [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'pci_requests' on Instance uuid 9c26e20b-dfc4-432c-a851-499dbea18f01 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.867457] env[61974]: DEBUG nova.network.neutron [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updated VIF entry in instance network info cache for port a342d02a-7577-428c-946f-e5725112ceec. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.867838] env[61974]: DEBUG nova.network.neutron [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [{"id": "a342d02a-7577-428c-946f-e5725112ceec", "address": "fa:16:3e:99:e8:62", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa342d02a-75", "ovs_interfaceid": "a342d02a-7577-428c-946f-e5725112ceec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.939707] env[61974]: DEBUG nova.scheduler.client.report [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.999687] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379334, 'name': CreateVM_Task, 'duration_secs': 1.429685} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.999874] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.000616] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.000796] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.001155] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1024.001439] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cec71039-31eb-49ef-a90b-8589be5780a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.007241] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1024.007241] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d53398-82df-316b-3819-8bf04a978783" [ 1024.007241] env[61974]: _type = "Task" [ 1024.007241] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.017216] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d53398-82df-316b-3819-8bf04a978783, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.047494] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.047939] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.048341] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.048601] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.048900] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.051656] env[61974]: INFO nova.compute.manager [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Terminating instance [ 1024.053434] env[61974]: DEBUG nova.compute.manager [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.053640] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.054503] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87d64a2-59aa-49c5-ba92-a34c3993cfe5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.063798] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.064109] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69036de1-8c2f-4e20-bed0-cf3eb4daddb2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.072900] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1024.072900] env[61974]: value = "task-1379338" [ 1024.072900] env[61974]: _type = "Task" [ 1024.072900] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.083808] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.160827] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.211218] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.308775] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Releasing lock "refresh_cache-912ff104-9c97-4486-99c8-71a35180abb0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.309243] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received event network-vif-unplugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.309472] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.309696] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.309899] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.310184] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] No waiting events found dispatching network-vif-unplugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.310384] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received event network-vif-unplugged-8f7e5444-15c1-48c6-8635-b93eb2ee90df for instance with task_state deleting. {{(pid=61974) _process_instance_event /opt/stack/nova/nova/compute/manager.py:10909}} [ 1024.310560] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Received event network-changed-8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.310789] env[61974]: DEBUG nova.compute.manager [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Refreshing instance network info cache due to event network-changed-8f7e5444-15c1-48c6-8635-b93eb2ee90df. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1024.310995] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Acquiring lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.311191] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Acquired lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.311365] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Refreshing network info cache for port 8f7e5444-15c1-48c6-8635-b93eb2ee90df {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.312984] env[61974]: DEBUG nova.objects.base [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Object Instance<9c26e20b-dfc4-432c-a851-499dbea18f01> lazy-loaded attributes: flavor,pci_requests {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1024.313220] env[61974]: DEBUG nova.network.neutron [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1024.371576] env[61974]: DEBUG oslo_concurrency.lockutils [req-fc001c19-f978-4a90-a5f8-e1f21810db1c req-bb22a867-69ef-4b9f-9412-fa4f62607e1e service nova] Releasing lock "refresh_cache-ceb0dd02-6441-4923-99f6-73f8eab86fe5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.381889] env[61974]: DEBUG nova.policy [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e186033f624742a59502ddf87167f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '102785ae1c584cdb925a55afc3412fb9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1024.445502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.326s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.521866] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': 
session[525705e4-584c-0143-c92b-c97128d43fa3]52d53398-82df-316b-3819-8bf04a978783, 'name': SearchDatastore_Task, 'duration_secs': 0.029513} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.522312] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.522517] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.522765] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.522915] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.523114] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.523512] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9eb3c892-b8f2-4e92-9a19-c035a320a8d3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.536098] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.536283] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.537245] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5485e703-d764-4e2d-8634-fbfff2f1340b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.545541] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1024.545541] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]523cdefd-4c2e-3ba9-186f-3226fd766707" [ 1024.545541] env[61974]: _type = "Task" [ 1024.545541] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.555662] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523cdefd-4c2e-3ba9-186f-3226fd766707, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.585261] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.612453] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.612453] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.663375] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379333, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.505276} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.663654] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7303d3bd-2aee-4964-855b-6068bc1100ed/7303d3bd-2aee-4964-855b-6068bc1100ed.vmdk to [datastore2] 6e698472-b4c0-45dc-869d-d51bbe00552c/6e698472-b4c0-45dc-869d-d51bbe00552c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1024.664510] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed37177-607b-4853-9450-b4738126e35b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.689295] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 6e698472-b4c0-45dc-869d-d51bbe00552c/6e698472-b4c0-45dc-869d-d51bbe00552c.vmdk or device None with type streamOptimized {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.689635] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-055ce38d-aca6-4b96-bae8-d85ddffdb7ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.713624] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.513232} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.714967] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.715191] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.715377] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.718013] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1024.718013] env[61974]: value = "task-1379339" [ 1024.718013] env[61974]: _type = "Task" [ 1024.718013] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.727879] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.955504] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20fc1c6-e53c-40d4-9db3-20a28d96fd27 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.308s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.956536] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.873s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.956773] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.957026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.957209] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.960469] env[61974]: INFO nova.compute.manager [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Terminating instance [ 1024.963903] env[61974]: DEBUG nova.compute.manager [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.964121] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.964395] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee0cabad-7089-4f15-bf3a-9fd80153bca3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.976851] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844b5799-1d23-4dd2-8771-fc003c77673a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.011518] env[61974]: WARNING nova.virt.vmwareapi.vmops [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc048c22-81e0-40fb-9a06-9b84a54e4891 could not be found. [ 1025.011644] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.011838] env[61974]: INFO nova.compute.manager [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1025.012109] env[61974]: DEBUG oslo.service.loopingcall [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.014573] env[61974]: DEBUG nova.compute.manager [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1025.014679] env[61974]: DEBUG nova.network.neutron [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1025.039134] env[61974]: DEBUG nova.compute.manager [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.039134] env[61974]: DEBUG nova.compute.manager [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing instance network info cache due to event network-changed-a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1025.039134] env[61974]: DEBUG oslo_concurrency.lockutils [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.039264] env[61974]: DEBUG oslo_concurrency.lockutils [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.039421] env[61974]: DEBUG nova.network.neutron [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1025.058396] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]523cdefd-4c2e-3ba9-186f-3226fd766707, 'name': SearchDatastore_Task, 'duration_secs': 0.01785} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.061382] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa2728b3-4bc5-44e0-bdd8-e3f57926bcce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.067485] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1025.067485] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52eabc52-1dd0-0738-178b-78368fbb4f74" [ 1025.067485] env[61974]: _type = "Task" [ 1025.067485] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.077129] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52eabc52-1dd0-0738-178b-78368fbb4f74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.085032] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379338, 'name': PowerOffVM_Task, 'duration_secs': 0.666039} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.085824] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.086064] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.086361] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58e45fcf-45d1-49c7-9a14-7414d224281a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.088383] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updated VIF entry in instance network info cache for port 8f7e5444-15c1-48c6-8635-b93eb2ee90df. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.088754] env[61974]: DEBUG nova.network.neutron [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updating instance_info_cache with network_info: [{"id": "8f7e5444-15c1-48c6-8635-b93eb2ee90df", "address": "fa:16:3e:d7:6a:cc", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": null, "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8f7e5444-15", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.114875] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1025.179624] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.179914] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1025.179914] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleting the datastore file [datastore2] abe0168a-e838-468a-a223-7c2a64497c0c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.181142] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c703449-0496-407b-b31a-e6e27a89e3c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.187343] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1025.187343] env[61974]: value = "task-1379341" [ 1025.187343] env[61974]: _type = "Task" [ 1025.187343] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.196749] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.231541] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379339, 'name': ReconfigVM_Task, 'duration_secs': 0.281129} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.231821] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 6e698472-b4c0-45dc-869d-d51bbe00552c/6e698472-b4c0-45dc-869d-d51bbe00552c.vmdk or device None with type streamOptimized {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.232469] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af3d41b1-ae83-488f-98d7-fb1b9f081191 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.242411] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1025.242411] env[61974]: value = "task-1379342" [ 1025.242411] env[61974]: _type = "Task" [ 1025.242411] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.254834] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379342, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.418571] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.418857] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.419110] env[61974]: INFO nova.compute.manager [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Shelving [ 1025.578110] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52eabc52-1dd0-0738-178b-78368fbb4f74, 'name': SearchDatastore_Task, 'duration_secs': 0.010889} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.580551] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.580826] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 912ff104-9c97-4486-99c8-71a35180abb0/912ff104-9c97-4486-99c8-71a35180abb0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.581116] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a0bbd92-0a16-449d-88b5-2b9d78d31608 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.589237] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1025.589237] env[61974]: value = "task-1379343" [ 1025.589237] env[61974]: _type = "Task" [ 1025.589237] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.592578] env[61974]: DEBUG oslo_concurrency.lockutils [req-4448478c-d88d-4d20-a0f3-eba0d1911a6b req-4c70d5a9-354c-4e10-9e6e-dc9d3818e7b0 service nova] Releasing lock "refresh_cache-cc048c22-81e0-40fb-9a06-9b84a54e4891" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.598208] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379343, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.638757] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.639148] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.640721] env[61974]: INFO nova.compute.claims [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.703778] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.759509] env[61974]: DEBUG nova.network.neutron [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.760921] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379342, 'name': Rename_Task, 'duration_secs': 0.475731} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.763400] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1025.763687] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.763857] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1025.764141] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.764465] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1025.764548] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1025.764782] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1025.764992] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1025.765224] 
env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1025.765401] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1025.765745] env[61974]: DEBUG nova.virt.hardware [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1025.766211] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.766877] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e350f1-1bdb-4ffa-a850-09f5254e96ac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.770612] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c795a85f-ed10-4e32-956f-903a0af3a3a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.779140] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c26c06-0181-4251-9e97-6c5787df3927 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.789491] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1025.789491] env[61974]: value = "task-1379344" [ 1025.789491] env[61974]: _type = "Task" [ 1025.789491] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.802500] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:a5:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3d592f0-8ee9-4b5c-9397-cf3da1294c61', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.810569] env[61974]: DEBUG oslo.service.loopingcall [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.812294] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.815430] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-412c87cb-347b-41b0-be4c-58bb0e3a0c8a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.835143] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379344, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.841614] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.841614] env[61974]: value = "task-1379345" [ 1025.841614] env[61974]: _type = "Task" [ 1025.841614] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.850108] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379345, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.906849] env[61974]: DEBUG nova.network.neutron [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updated VIF entry in instance network info cache for port a2b829bf-e2cb-41c7-a840-499beb350683. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.907588] env[61974]: DEBUG nova.network.neutron [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.926933] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.927922] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f259b2a0-ec50-4933-a576-17991bf446bf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.936154] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1025.936154] env[61974]: value = "task-1379346" [ 1025.936154] env[61974]: _type = "Task" [ 1025.936154] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.946330] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379346, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.018258] env[61974]: DEBUG nova.network.neutron [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Successfully updated port: 915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.100482] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379343, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.202381] env[61974]: DEBUG oslo_vmware.api [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.587983} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.202787] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.202847] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.203055] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.203496] env[61974]: INFO nova.compute.manager [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1026.204025] env[61974]: DEBUG oslo.service.loopingcall [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.204130] env[61974]: DEBUG nova.compute.manager [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1026.204262] env[61974]: DEBUG nova.network.neutron [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1026.262437] env[61974]: INFO nova.compute.manager [-] [instance: cc048c22-81e0-40fb-9a06-9b84a54e4891] Took 1.25 seconds to deallocate network for instance. [ 1026.301754] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379344, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.351624] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379345, 'name': CreateVM_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.410909] env[61974]: DEBUG oslo_concurrency.lockutils [req-c4317915-aadb-47f9-9ba0-156c2916535c req-0d6f4e62-1bb5-4d42-8522-8a0e369d52c7 service nova] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1026.446597] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379346, 'name': PowerOffVM_Task, 'duration_secs': 0.281374} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.447338] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.447901] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dd11b1-1907-4792-a0e4-1e2c3015adf9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.469040] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18501c3-7ddb-44c2-999e-cf733525d59e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.521424] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.521735] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.522075] env[61974]: DEBUG nova.network.neutron [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.601243] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379343, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741628} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.601591] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 912ff104-9c97-4486-99c8-71a35180abb0/912ff104-9c97-4486-99c8-71a35180abb0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.601814] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.602098] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d253f92e-e1c2-4dd4-9b18-a739b3b00d01 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.609666] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1026.609666] env[61974]: value = "task-1379347" [ 1026.609666] env[61974]: _type = "Task" [ 1026.609666] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.619243] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379347, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.803300] env[61974]: DEBUG oslo_vmware.api [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379344, 'name': PowerOnVM_Task, 'duration_secs': 0.647589} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.803589] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.803785] env[61974]: INFO nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Took 15.59 seconds to spawn the instance on the hypervisor. 
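Annotation (not part of the captured log): the repeated "Invoking <Something>_Task ... Task: {'id': task-NNN, ...} progress is N% ... completed successfully" entries above are all instances of the same oslo.vmware pattern: a vCenter task is started through the API session, and oslo_vmware.api then polls it (_poll_task) until it completes (wait_for_task). The sketch below is a minimal, hedged illustration of that pattern; the vCenter host, credentials and datastore paths are placeholders and are not taken from this log.

# Sketch only (assumptions: placeholder vCenter endpoint, credentials and
# datastore paths). Illustrates the invoke-then-poll pattern that produces
# the "Invoking CopyVirtualDisk_Task", "progress is N%" and
# "completed successfully" entries seen in this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',         # placeholder host, not the vCenter in this log
    'user', 'secret',          # placeholder credentials
    api_retry_count=3,
    task_poll_interval=0.5)

# Start an asynchronous vCenter task (here: a virtual disk copy) ...
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] cache/base.vmdk',            # placeholder path
    destName='[datastore1] instance-dir/instance.vmdk')   # placeholder path

# ... then block while oslo.vmware polls the task; each poll shows up as a
# "Task: {...} progress is N%" DEBUG line, and wait_for_task returns the
# task info once the task reports success.
task_info = session.wait_for_task(task)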
[ 1026.803965] env[61974]: DEBUG nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1026.804885] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d12a5e4-1493-45e7-b908-3ffd85c5acb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.842578] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c05753-b6ec-41b8-9757-4166718b2d81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.856368] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379345, 'name': CreateVM_Task, 'duration_secs': 0.962775} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.857820] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.858777] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.858951] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.859322] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1026.860279] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e073b9-2c2d-4369-9491-73d58dfd8cc8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.863189] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a3a488-f477-477e-85ac-26737663676c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.869923] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1026.869923] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f5cf67-99b8-c796-d85b-14f92125bad7" [ 1026.869923] env[61974]: _type = "Task" [ 1026.869923] env[61974]: } to 
complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.896365] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928cd475-81d4-4e65-bf74-15a31119fcaa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.905353] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f5cf67-99b8-c796-d85b-14f92125bad7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.906540] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71836cf-9cf8-494e-8236-a7584c48c893 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.919814] env[61974]: DEBUG nova.compute.provider_tree [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.973120] env[61974]: DEBUG nova.network.neutron [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.980952] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Creating Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1026.981301] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4f2d67d3-f80e-4943-8a0a-3ca7f6d6fa0a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.990219] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1026.990219] env[61974]: value = "task-1379348" [ 1026.990219] env[61974]: _type = "Task" [ 1026.990219] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.999094] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379348, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.064865] env[61974]: WARNING nova.network.neutron [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] a3cca736-a69c-4d05-a3fd-386cf3c4bee5 already exists in list: networks containing: ['a3cca736-a69c-4d05-a3fd-386cf3c4bee5']. ignoring it [ 1027.120372] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071578} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.120642] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.121427] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565e4f26-e575-440e-a706-c936639f4647 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.146245] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 912ff104-9c97-4486-99c8-71a35180abb0/912ff104-9c97-4486-99c8-71a35180abb0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.146537] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc57c84f-b3e5-43a9-acbe-eb52c704584d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.166819] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1027.166819] env[61974]: value = "task-1379349" [ 1027.166819] env[61974]: _type = "Task" [ 1027.166819] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.174894] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379349, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.295834] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0556bdf0-a75d-47f4-8f09-a912c9ecdee6 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "cc048c22-81e0-40fb-9a06-9b84a54e4891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.339s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.323704] env[61974]: INFO nova.compute.manager [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Took 25.41 seconds to build instance. [ 1027.384426] env[61974]: DEBUG nova.network.neutron [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "915eddb2-5b76-46da-8c84-a99ed89ca777", "address": "fa:16:3e:d3:e1:45", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap915eddb2-5b", "ovs_interfaceid": "915eddb2-5b76-46da-8c84-a99ed89ca777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": 
true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.400357] env[61974]: DEBUG nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.400621] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.400869] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.401062] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.401706] env[61974]: DEBUG nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] No waiting events found dispatching network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1027.401706] env[61974]: WARNING nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received unexpected event network-vif-plugged-915eddb2-5b76-46da-8c84-a99ed89ca777 for instance with vm_state active and task_state None. [ 1027.401706] env[61974]: DEBUG nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-changed-915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.401908] env[61974]: DEBUG nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing instance network info cache due to event network-changed-915eddb2-5b76-46da-8c84-a99ed89ca777. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1027.401908] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.410016] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f5cf67-99b8-c796-d85b-14f92125bad7, 'name': SearchDatastore_Task, 'duration_secs': 0.065252} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.410383] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.410646] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.410892] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.411058] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.411250] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.411525] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07ebc7bc-636f-44cc-a432-f871271a2f8b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.423653] env[61974]: DEBUG nova.scheduler.client.report [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.427465] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.427598] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.428724] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8abdbc3e-1806-40ba-ac37-df19c1a74412 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.435213] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1027.435213] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f5bcac-28f2-c9db-1134-65fd8d11adce" [ 1027.435213] env[61974]: _type = "Task" [ 1027.435213] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.445090] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f5bcac-28f2-c9db-1134-65fd8d11adce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.475778] env[61974]: INFO nova.compute.manager [-] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Took 1.27 seconds to deallocate network for instance. [ 1027.501708] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379348, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.678804] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379349, 'name': ReconfigVM_Task, 'duration_secs': 0.374156} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.679159] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 912ff104-9c97-4486-99c8-71a35180abb0/912ff104-9c97-4486-99c8-71a35180abb0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.679973] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63973f12-9c24-440c-8b1f-ba03c8352989 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.687037] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1027.687037] env[61974]: value = "task-1379350" [ 1027.687037] env[61974]: _type = "Task" [ 1027.687037] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.696047] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379350, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.825845] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ec632edc-04d1-4b0c-bcdd-ab7299495973 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.947s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.887719] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.888594] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.888788] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.889233] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.889509] env[61974]: DEBUG nova.network.neutron [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Refreshing network info cache for port 915eddb2-5b76-46da-8c84-a99ed89ca777 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.891479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba7eb1c-74ec-496c-9d5b-6e45a9dfca79 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.910245] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.910550] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.910718] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.910909] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.911079] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.911244] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.911455] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.911618] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.911790] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.911961] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.912189] env[61974]: DEBUG nova.virt.hardware [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.919516] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfiguring VM to attach interface {{(pid=61974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1027.920395] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85f4a92a-3adc-469c-86f5-63938a23f63a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.936077] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.936611] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1027.954369] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f5bcac-28f2-c9db-1134-65fd8d11adce, 'name': SearchDatastore_Task, 'duration_secs': 0.012708} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.956769] env[61974]: DEBUG oslo_vmware.api [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1027.956769] env[61974]: value = "task-1379351" [ 1027.956769] env[61974]: _type = "Task" [ 1027.956769] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.957070] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9587abc-2efa-4448-96af-4ddb7ee74a45 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.969885] env[61974]: DEBUG oslo_vmware.api [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379351, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.971427] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1027.971427] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f01b8b-cdaf-d1c1-ac4f-86fcace13f81" [ 1027.971427] env[61974]: _type = "Task" [ 1027.971427] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.983279] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.983535] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.983790] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.986752] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f01b8b-cdaf-d1c1-ac4f-86fcace13f81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.002482] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379348, 'name': CreateSnapshot_Task, 'duration_secs': 0.997003} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.002650] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Created Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1028.003558] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc83bb2-8370-4d4b-9280-6314f57ba8b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.015446] env[61974]: INFO nova.scheduler.client.report [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocations for instance abe0168a-e838-468a-a223-7c2a64497c0c [ 1028.197814] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379350, 'name': Rename_Task, 'duration_secs': 0.247134} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.198109] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.198370] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d789df8-0822-4943-8788-965e21f1c50d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.205971] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1028.205971] env[61974]: value = "task-1379352" [ 1028.205971] env[61974]: _type = "Task" [ 1028.205971] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.214495] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379352, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.442195] env[61974]: DEBUG nova.compute.utils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1028.445307] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1028.445567] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.470437] env[61974]: DEBUG oslo_vmware.api [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379351, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.484684] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f01b8b-cdaf-d1c1-ac4f-86fcace13f81, 'name': SearchDatastore_Task, 'duration_secs': 0.013203} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.487157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.487485] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.487774] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e2bd7fe-7197-42d8-bc77-05b6a3f59064 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.496830] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1028.496830] env[61974]: value = "task-1379353" [ 1028.496830] env[61974]: _type = "Task" [ 1028.496830] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.507686] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379353, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.517800] env[61974]: DEBUG nova.policy [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6173db476e814cbaa6b3278cfa527bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dae05232e0041dba49b0432d64d82d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1028.525930] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Creating linked-clone VM from snapshot {{(pid=61974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1028.528626] env[61974]: DEBUG oslo_concurrency.lockutils [None req-597078da-dc1d-451a-a6c1-80752bd261b8 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "abe0168a-e838-468a-a223-7c2a64497c0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.481s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.529518] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bd55d7be-d28e-4de8-83a4-8d6bed3b8db0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.542043] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1028.542043] env[61974]: value = "task-1379354" [ 1028.542043] env[61974]: _type = "Task" [ 1028.542043] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.552445] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379354, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.579348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "f475d963-0c09-4115-885a-04e28895df14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.579686] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.720158] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379352, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.751336] env[61974]: DEBUG nova.network.neutron [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updated VIF entry in instance network info cache for port 915eddb2-5b76-46da-8c84-a99ed89ca777. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.751985] env[61974]: DEBUG nova.network.neutron [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "915eddb2-5b76-46da-8c84-a99ed89ca777", "address": "fa:16:3e:d3:e1:45", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap915eddb2-5b", "ovs_interfaceid": "915eddb2-5b76-46da-8c84-a99ed89ca777", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.754107] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.754107] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.814108] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "6e698472-b4c0-45dc-869d-d51bbe00552c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.814589] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.814897] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.815207] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.815479] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.819273] env[61974]: INFO nova.compute.manager [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Terminating instance [ 1028.822441] env[61974]: DEBUG nova.compute.manager [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1028.822748] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.823907] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72531dbc-536a-459e-a6c4-09ed007a7564 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.834955] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.835260] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ac9988a-cba5-4022-8cdd-619dfd18c783 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.844263] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1028.844263] env[61974]: value = "task-1379355" [ 1028.844263] env[61974]: _type = "Task" [ 1028.844263] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.856141] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.951153] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1028.976837] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Successfully created port: 62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.979537] env[61974]: DEBUG oslo_vmware.api [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379351, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.012238] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379353, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.055426] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379354, 'name': CloneVM_Task} progress is 94%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.081699] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1029.219729] env[61974]: DEBUG oslo_vmware.api [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379352, 'name': PowerOnVM_Task, 'duration_secs': 0.54078} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.220141] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.220455] env[61974]: INFO nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Took 9.64 seconds to spawn the instance on the hypervisor. 
[ 1029.220731] env[61974]: DEBUG nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.221966] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61be2722-b222-4c51-949b-5a877e7ce438 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.257032] env[61974]: DEBUG oslo_concurrency.lockutils [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.257240] env[61974]: DEBUG nova.compute.manager [req-ee407e73-f9ab-4042-83c2-1934ba5453c0 req-92ee2695-d09d-412f-8347-cadf5d2196f9 service nova] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Received event network-vif-deleted-f2f9e10a-4e37-47fa-8040-638e6376acc6 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1029.258167] env[61974]: INFO nova.compute.manager [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Detaching volume 7e510873-51bc-41b6-8678-b1220c4a2013 [ 1029.306469] env[61974]: INFO nova.virt.block_device [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Attempting to driver detach volume 7e510873-51bc-41b6-8678-b1220c4a2013 from mountpoint /dev/sdb [ 1029.306812] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1029.307257] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293000', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'name': 'volume-7e510873-51bc-41b6-8678-b1220c4a2013', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1fa5433-8f26-48db-a19d-d1e11245fb44', 'attached_at': '', 'detached_at': '', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'serial': '7e510873-51bc-41b6-8678-b1220c4a2013'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1029.308760] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c4a06a-9481-4739-a04b-9ce0b1a67993 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.334771] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf3ec0a-1865-4f73-a611-cc7a2fd736c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.343281] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a1d049-4f4f-42f9-b203-a5c81292abdb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.372350] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8d25df-1e45-489c-af6e-74fff12f5c61 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.374995] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379355, 'name': PowerOffVM_Task, 'duration_secs': 0.512052} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.375291] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.375462] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.376154] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a266aab-f18f-445d-a4c2-909298e48b48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.389637] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] The volume has not been displaced from its original location: [datastore1] volume-7e510873-51bc-41b6-8678-b1220c4a2013/volume-7e510873-51bc-41b6-8678-b1220c4a2013.vmdk. No consolidation needed. {{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1029.395238] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1029.395611] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6681a125-0a1f-4434-9963-da0f4193c83d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.415964] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1029.415964] env[61974]: value = "task-1379357" [ 1029.415964] env[61974]: _type = "Task" [ 1029.415964] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.425058] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.471395] env[61974]: DEBUG oslo_vmware.api [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379351, 'name': ReconfigVM_Task, 'duration_secs': 1.287619} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.472176] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.472224] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfigured VM to attach interface {{(pid=61974) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1029.509607] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379353, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659633} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.509767] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.509985] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.510296] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99d9e3f8-257c-4197-9f73-30d40e28bf0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.517536] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1029.517536] env[61974]: value = "task-1379358" [ 1029.517536] env[61974]: _type = "Task" [ 1029.517536] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.525559] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379358, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.554895] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379354, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.606682] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.606994] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.608631] env[61974]: INFO nova.compute.claims [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.744919] env[61974]: INFO nova.compute.manager [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Took 16.34 seconds to build instance. [ 1029.762545] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.762808] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.763127] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleting the datastore file [datastore2] 6e698472-b4c0-45dc-869d-d51bbe00552c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.764156] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d61f688b-bdb1-4901-a140-25de692e41b6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.772849] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1029.772849] env[61974]: value = "task-1379359" [ 1029.772849] env[61974]: _type = "Task" [ 1029.772849] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.786101] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379359, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.926144] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379357, 'name': ReconfigVM_Task, 'duration_secs': 0.442013} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.926471] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1029.931677] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8289239-7eb2-4d56-95b6-c7c807123e39 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.949609] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1029.949609] env[61974]: value = "task-1379360" [ 1029.949609] env[61974]: _type = "Task" [ 1029.949609] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.958018] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379360, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.962310] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1029.976576] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e7f1f568-1d49-42ae-89a9-913c0f9108ea tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.957s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.989323] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.989641] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.989831] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.990576] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.990576] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.990576] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.990776] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.990944] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.991190] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.991746] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.991746] env[61974]: DEBUG nova.virt.hardware [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.992737] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7010d63-1962-4b1e-bf44-6706995c5395 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.002381] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543ea061-730c-4d6d-bc73-948c084d6e36 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.026909] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12324} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.027205] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.028016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a412eca3-b534-4c03-90a8-c6b619692318 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.051253] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.051654] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55306b48-764a-48c5-84f0-6cf9859ea165 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.075221] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379354, 'name': CloneVM_Task, 'duration_secs': 1.425841} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.076506] env[61974]: INFO nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Created linked-clone VM from snapshot [ 1030.076842] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1030.076842] env[61974]: value = "task-1379361" [ 1030.076842] env[61974]: _type = "Task" [ 1030.076842] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.077613] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21baf5f5-a8c1-41d4-91fc-75874dde26db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.088889] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Uploading image 7e8de138-c44e-4516-9083-c48e99a4114f {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1030.094133] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379361, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.112074] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1030.112074] env[61974]: value = "vm-293019" [ 1030.112074] env[61974]: _type = "VirtualMachine" [ 1030.112074] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1030.115035] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-18540450-1e0c-4264-a370-12fcb07e25f4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.124418] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lease: (returnval){ [ 1030.124418] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5261c5ae-45f2-7d91-9b40-a96effc7455a" [ 1030.124418] env[61974]: _type = "HttpNfcLease" [ 1030.124418] env[61974]: } obtained for exporting VM: (result){ [ 1030.124418] env[61974]: value = "vm-293019" [ 1030.124418] env[61974]: _type = "VirtualMachine" [ 1030.124418] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1030.124845] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the lease: (returnval){ [ 1030.124845] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5261c5ae-45f2-7d91-9b40-a96effc7455a" [ 1030.124845] env[61974]: _type = "HttpNfcLease" [ 1030.124845] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1030.134873] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1030.134873] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5261c5ae-45f2-7d91-9b40-a96effc7455a" [ 1030.134873] env[61974]: _type = "HttpNfcLease" [ 1030.134873] env[61974]: } is initializing. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1030.247028] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d280f01c-14e9-4a49-b748-fc4481dd330e tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.851s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.287685] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379359, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.441131] env[61974]: DEBUG nova.compute.manager [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Received event network-vif-plugged-62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.441364] env[61974]: DEBUG oslo_concurrency.lockutils [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.441604] env[61974]: DEBUG oslo_concurrency.lockutils [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.441735] env[61974]: DEBUG oslo_concurrency.lockutils [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.441910] env[61974]: DEBUG nova.compute.manager [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] No waiting events found dispatching network-vif-plugged-62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1030.442308] env[61974]: WARNING nova.compute.manager [req-c23e41b9-fe3b-46ea-a699-87ba80a45067 req-24af1efc-6731-4a96-a733-5e7d80053ad5 service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Received unexpected event network-vif-plugged-62349265-e925-44c7-8158-8bfcb7fc0478 for instance with vm_state building and task_state spawning. [ 1030.461321] env[61974]: DEBUG oslo_vmware.api [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379360, 'name': ReconfigVM_Task, 'duration_secs': 0.241836} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.461671] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293000', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'name': 'volume-7e510873-51bc-41b6-8678-b1220c4a2013', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b1fa5433-8f26-48db-a19d-d1e11245fb44', 'attached_at': '', 'detached_at': '', 'volume_id': '7e510873-51bc-41b6-8678-b1220c4a2013', 'serial': '7e510873-51bc-41b6-8678-b1220c4a2013'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1030.555020] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Successfully updated port: 62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.592338] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.633533] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1030.633533] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5261c5ae-45f2-7d91-9b40-a96effc7455a" [ 1030.633533] env[61974]: _type = "HttpNfcLease" [ 1030.633533] env[61974]: } is ready. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1030.634000] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1030.634000] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5261c5ae-45f2-7d91-9b40-a96effc7455a" [ 1030.634000] env[61974]: _type = "HttpNfcLease" [ 1030.634000] env[61974]: }. {{(pid=61974) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1030.635108] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37006434-ef26-4bfb-bb7b-07089972fd6e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.644702] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk from lease info. 
{{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1030.644933] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk for reading. {{(pid=61974) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1030.761664] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2cdbac26-864c-4d39-ac0a-9eec4a9daef6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.787761] env[61974]: DEBUG oslo_vmware.api [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.590749} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.788112] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.788377] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.788540] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.788756] env[61974]: INFO nova.compute.manager [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Took 1.97 seconds to destroy the instance on the hypervisor. [ 1030.789543] env[61974]: DEBUG oslo.service.loopingcall [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.789824] env[61974]: DEBUG nova.compute.manager [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1030.789944] env[61974]: DEBUG nova.network.neutron [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1030.897325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bc6c6b-ec03-4444-b0dd-fafbb89fbdb7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.905885] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e0c3fe-4652-4ded-a98c-e4054e7a610f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.942754] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "912ff104-9c97-4486-99c8-71a35180abb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.943044] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.943263] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "912ff104-9c97-4486-99c8-71a35180abb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.943901] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.944235] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.946952] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa754977-9b96-4cd1-9df2-9f3936223b82 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.950059] env[61974]: INFO nova.compute.manager [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Terminating instance [ 1030.953840] env[61974]: DEBUG nova.compute.manager [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1030.954053] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.954851] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0edeb88-97d8-42b9-9da2-84f86220a3c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.961629] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf5d983-5ee8-490b-acc8-78ba22136935 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.967488] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.968067] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc273f26-3c95-4bb1-a3fc-164ff3ac4655 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.978404] env[61974]: DEBUG nova.compute.provider_tree [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.982936] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1030.982936] env[61974]: value = "task-1379363" [ 1030.982936] env[61974]: _type = "Task" [ 1030.982936] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.993084] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379363, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.022926] env[61974]: DEBUG nova.objects.instance [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.058490] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.058490] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.058745] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.093201] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379361, 'name': ReconfigVM_Task, 'duration_secs': 0.810877} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.096404] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Reconfigured VM instance instance-0000005b to attach disk [datastore1] a2fbbc4a-92da-4917-a73e-a37a8980c62c/a2fbbc4a-92da-4917-a73e-a37a8980c62c.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.096404] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f045072-4ce3-47b6-adf0-b53192d39774 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.106399] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1031.106399] env[61974]: value = "task-1379364" [ 1031.106399] env[61974]: _type = "Task" [ 1031.106399] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.117427] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379364, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.216656] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.216949] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.484506] env[61974]: DEBUG nova.scheduler.client.report [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.499482] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379363, 'name': PowerOffVM_Task, 'duration_secs': 0.2034} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.500180] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.500180] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.500381] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b84cecc7-9176-481a-930b-418814625a74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.596403] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.612260] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.612873] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.612978] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleting the datastore file [datastore1] 912ff104-9c97-4486-99c8-71a35180abb0 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.613735] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7005bbd2-e2e1-462e-bdc3-fbb4feb288c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.620032] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379364, 'name': Rename_Task, 'duration_secs': 0.212677} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.623207] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.623942] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de24af99-05a1-4967-bc02-aa296cb5d343 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.631316] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1031.631316] env[61974]: value = "task-1379366" [ 1031.631316] env[61974]: _type = "Task" [ 1031.631316] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.633913] env[61974]: DEBUG nova.network.neutron [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.635355] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1031.635355] env[61974]: value = "task-1379367" [ 1031.635355] env[61974]: _type = "Task" [ 1031.635355] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.649915] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379367, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.650257] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.720065] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.720065] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.721203] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5329fed4-4518-463f-9343-ccf10f10d5c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.745451] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d9dcb6-707c-44c6-8d42-0d392d5332b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.774391] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfiguring VM to detach interface {{(pid=61974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1031.774876] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a17f7d9-3f32-4c6b-b81d-07f2f14b1d3a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.797717] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1031.797717] env[61974]: value = "task-1379368" [ 1031.797717] env[61974]: _type = "Task" [ 1031.797717] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.812357] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.829226] env[61974]: DEBUG nova.network.neutron [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.949987] env[61974]: DEBUG oslo_concurrency.lockutils [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.994115] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.994832] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1032.030779] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dd306420-cd1f-4012-bc52-8aa2c9c842da tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.277s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.032766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.083s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.032992] env[61974]: DEBUG nova.compute.manager [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1032.034083] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a85ed5-ec5c-42fb-8efc-ad3e60bd0e0d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.042912] env[61974]: DEBUG nova.compute.manager [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1032.043639] env[61974]: DEBUG nova.objects.instance [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.136748] env[61974]: INFO nova.compute.manager [-] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Took 1.35 seconds to deallocate network for instance. [ 1032.148151] env[61974]: DEBUG oslo_vmware.api [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246581} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.149060] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.149368] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.149628] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.149858] env[61974]: INFO nova.compute.manager [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1032.150132] env[61974]: DEBUG oslo.service.loopingcall [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.150440] env[61974]: DEBUG nova.compute.manager [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1032.150590] env[61974]: DEBUG nova.network.neutron [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1032.155780] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379367, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.309591] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.332047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.332047] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Instance network_info: |[{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1032.333079] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:58:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62349265-e925-44c7-8158-8bfcb7fc0478', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.340890] env[61974]: DEBUG oslo.service.loopingcall [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.341269] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.341582] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19dae1f4-b64d-4e46-bd93-c251539496b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.370410] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.370410] env[61974]: value = "task-1379369" [ 1032.370410] env[61974]: _type = "Task" [ 1032.370410] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.385900] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379369, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.476364] env[61974]: DEBUG nova.compute.manager [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Received event network-changed-62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1032.476682] env[61974]: DEBUG nova.compute.manager [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Refreshing instance network info cache due to event network-changed-62349265-e925-44c7-8158-8bfcb7fc0478. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1032.476943] env[61974]: DEBUG oslo_concurrency.lockutils [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] Acquiring lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.477220] env[61974]: DEBUG oslo_concurrency.lockutils [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] Acquired lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.477335] env[61974]: DEBUG nova.network.neutron [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Refreshing network info cache for port 62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.504114] env[61974]: DEBUG nova.compute.utils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.505660] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1032.505961] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.549644] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.549909] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa89f014-6473-49c7-9c6a-352bd168df59 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.561054] env[61974]: DEBUG oslo_vmware.api [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1032.561054] env[61974]: value = "task-1379370" [ 1032.561054] env[61974]: _type = "Task" [ 1032.561054] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.563244] env[61974]: DEBUG nova.policy [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1032.577324] env[61974]: DEBUG oslo_vmware.api [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379370, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.650516] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.651041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.651121] env[61974]: DEBUG nova.objects.instance [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lazy-loading 'resources' on Instance uuid 6e698472-b4c0-45dc-869d-d51bbe00552c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.665945] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379367, 'name': PowerOnVM_Task, 'duration_secs': 0.791224} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.666374] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.667401] env[61974]: DEBUG nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1032.667851] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242745e9-4358-42a7-8eb8-a04e5bea0e99 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.813348] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.883259] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379369, 'name': CreateVM_Task, 'duration_secs': 0.414808} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.884122] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.884416] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.884416] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.884736] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1032.884963] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b02f7f0b-5991-443c-8b61-ec35a3e05058 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.890471] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1032.890471] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f26f57-eee0-a942-2868-fc80900793c7" [ 1032.890471] env[61974]: _type = "Task" [ 1032.890471] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.899646] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f26f57-eee0-a942-2868-fc80900793c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.936778] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Successfully created port: 3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.009718] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1033.023276] env[61974]: DEBUG nova.network.neutron [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.078155] env[61974]: DEBUG oslo_vmware.api [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379370, 'name': PowerOffVM_Task, 'duration_secs': 0.257161} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.078155] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.078155] env[61974]: DEBUG nova.compute.manager [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1033.078155] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a87a3a-5934-4327-afa3-250bb825cb07 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.180465] env[61974]: INFO nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] bringing vm to original state: 'stopped' [ 1033.312585] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.370189] env[61974]: DEBUG nova.network.neutron [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updated VIF entry in instance network info cache for port 62349265-e925-44c7-8158-8bfcb7fc0478. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.370780] env[61974]: DEBUG nova.network.neutron [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.388573] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0043ae-e2d1-4b94-b44d-cdef72554632 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.406008] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f26f57-eee0-a942-2868-fc80900793c7, 'name': SearchDatastore_Task, 'duration_secs': 0.01593} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.406772] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43be761-c50e-4862-8715-0aa5d9391afc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.412533] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.412533] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.412533] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.412533] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.412533] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.412533] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67290f4b-97fc-428d-a931-115dca2a8873 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.445606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53a8a92-14e3-45d5-8c1e-5a03f3285ed8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.448377] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.448539] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.449279] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2e3dbce-4d00-4b9c-b653-ce9c171a4f57 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.455734] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1033.455734] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522469db-63ce-c3cc-a02e-e0205ff0885c" [ 1033.455734] env[61974]: _type = "Task" [ 1033.455734] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.462235] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cd5b7d-df00-44c3-a0ae-60c8865a89e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.471230] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522469db-63ce-c3cc-a02e-e0205ff0885c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.479384] env[61974]: DEBUG nova.compute.provider_tree [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.526089] env[61974]: INFO nova.compute.manager [-] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Took 1.38 seconds to deallocate network for instance. [ 1033.590494] env[61974]: DEBUG oslo_concurrency.lockutils [None req-58852a67-a2a5-40b9-8d6f-ce2d749fdb72 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.815066] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.873219] env[61974]: DEBUG oslo_concurrency.lockutils [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] Releasing lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.873535] env[61974]: DEBUG nova.compute.manager [req-6721905e-2a78-49b2-9f2e-294039a68227 req-281f7375-9745-433b-be17-56588b544d0e service nova] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Received event network-vif-deleted-49aba758-8fda-480f-9179-23a891374764 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1033.968449] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522469db-63ce-c3cc-a02e-e0205ff0885c, 'name': SearchDatastore_Task, 'duration_secs': 0.040987} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.969239] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5ade77-5a91-4085-9159-48d516ce0bab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.974829] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1033.974829] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52afb3bc-2c9d-7592-ba4e-87de51bd35ee" [ 1033.974829] env[61974]: _type = "Task" [ 1033.974829] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.984106] env[61974]: DEBUG nova.scheduler.client.report [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1033.987196] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afb3bc-2c9d-7592-ba4e-87de51bd35ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.018306] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1034.032936] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.043344] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.043606] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.043829] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.043951] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.044116] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.044272] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.044578] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1034.044851] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.045069] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.045258] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.045446] env[61974]: DEBUG nova.virt.hardware [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.046454] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31920b0d-42f0-419f-a071-c3077f0e9a12 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.055914] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28205e0d-2589-46f7-8e19-c9923d8f9d81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.187852] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.188165] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.188384] env[61974]: DEBUG nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1034.189439] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c258f1-2347-44e6-a71e-fa6834733a1c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.196807] env[61974]: DEBUG nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1034.198976] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.199269] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58b13dea-7379-43fa-8200-37aad81a1ac7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.207037] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1034.207037] env[61974]: value = "task-1379371" [ 1034.207037] env[61974]: _type = "Task" [ 1034.207037] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.215946] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.314181] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.417725] env[61974]: DEBUG nova.objects.instance [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.487274] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52afb3bc-2c9d-7592-ba4e-87de51bd35ee, 'name': SearchDatastore_Task, 'duration_secs': 0.026319} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.487617] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.487912] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.488657] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.490826] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8992df9-769f-46c7-87c0-f3ec409242c5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.493524] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.461s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.493834] env[61974]: DEBUG nova.objects.instance [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lazy-loading 'resources' on Instance uuid 912ff104-9c97-4486-99c8-71a35180abb0 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.502657] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1034.502657] env[61974]: value = "task-1379372" [ 1034.502657] env[61974]: _type = "Task" [ 1034.502657] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.509262] env[61974]: DEBUG nova.compute.manager [req-ce6aad61-7fb3-4c66-b96a-d6601d1fbc47 req-eec3aae0-2a17-4752-9b66-4a902805e805 service nova] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Received event network-vif-deleted-eb8d6299-95d1-4112-8a74-4fc223060135 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.516379] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.517518] env[61974]: INFO nova.scheduler.client.report [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted allocations for instance 6e698472-b4c0-45dc-869d-d51bbe00552c [ 1034.717966] env[61974]: DEBUG oslo_vmware.api [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379371, 'name': PowerOffVM_Task, 'duration_secs': 0.189495} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.718307] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.718509] env[61974]: DEBUG nova.compute.manager [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1034.719438] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6c96f1-dba7-4014-9001-c9cb4e6fe17a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.732123] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Successfully updated port: 3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.814828] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.924102] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.924102] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.924641] env[61974]: DEBUG nova.network.neutron [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.924641] env[61974]: DEBUG nova.objects.instance [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'info_cache' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.016345] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379372, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.025448] env[61974]: DEBUG oslo_concurrency.lockutils [None req-be49e5d2-61bd-439e-b607-f0032b603e9a tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "6e698472-b4c0-45dc-869d-d51bbe00552c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.211s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.194186] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aee13c9-941e-45e5-b172-62c048a46c35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.203404] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a12e39-56bf-414f-b5fa-d7bb0f02ed40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.239041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.239041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.239275] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.240789] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.053s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.243174] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0a32a5-9e0b-48b4-819c-f6a321f59841 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.255901] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac67f09-b9f7-4e51-a001-6ccd3fcdc665 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.272313] env[61974]: DEBUG nova.compute.provider_tree [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.315134] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.427976] env[61974]: DEBUG nova.objects.base [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1035.516353] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575712} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.516856] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.517166] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.517513] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfe69579-5fbd-4fef-a77d-3091344c16a6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.526486] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1035.526486] env[61974]: value = "task-1379373" [ 1035.526486] env[61974]: _type = "Task" [ 1035.526486] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.536185] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379373, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.754226] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "5780d1d6-cd40-4b97-8a68-072c090540af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.754390] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.755027] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.755308] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.755673] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.757146] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.757934] env[61974]: INFO nova.compute.manager [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Terminating instance [ 1035.760409] env[61974]: DEBUG nova.compute.manager [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1035.760621] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.761514] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72042e29-4db5-4707-b96a-f43a564a3a9c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.770388] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.770704] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31369da6-b4be-45d2-87c4-d9d69bfbdcc1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.774877] env[61974]: DEBUG nova.scheduler.client.report [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1035.779944] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1035.779944] env[61974]: value = "task-1379374" [ 1035.779944] env[61974]: _type = "Task" [ 1035.779944] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.790141] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.802926] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.817425] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.018104] env[61974]: DEBUG nova.network.neutron [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Updating instance_info_cache with network_info: [{"id": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "address": "fa:16:3e:97:7b:81", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f49a7db-65", "ovs_interfaceid": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.037956] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070851} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.038313] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.039147] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541013af-9b0e-400e-a592-657c33f7ed32 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.062493] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.062978] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.063238] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.063458] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.063644] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.063816] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.065407] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbdaee63-fe99-49f1-ad1e-d08fe3c9078a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.079827] env[61974]: INFO nova.compute.manager [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Terminating instance [ 1036.084590] env[61974]: DEBUG nova.compute.manager [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.084798] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.086204] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4759e542-69b0-42d1-a151-792511038679 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.091323] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1036.091323] env[61974]: value = "task-1379375" [ 1036.091323] env[61974]: _type = "Task" [ 1036.091323] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.096702] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.097315] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b78bd36-9228-460c-92a0-9d419d536d95 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.103160] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.177410] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.177718] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.177959] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore1] a2fbbc4a-92da-4917-a73e-a37a8980c62c {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.178298] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71dd9c70-ae8e-4036-8252-38b82f361e12 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.190443] env[61974]: DEBUG oslo_vmware.api [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1036.190443] env[61974]: value = "task-1379377" [ 1036.190443] env[61974]: _type = "Task" [ 1036.190443] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.201824] env[61974]: DEBUG oslo_vmware.api [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379377, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.249068] env[61974]: DEBUG nova.network.neutron [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [{"id": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "address": "fa:16:3e:5f:98:3d", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f8230e7-78", "ovs_interfaceid": "7f8230e7-7883-4de2-bf5f-ffa36751a171", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.251469] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.251865] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.281926] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.285031] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.528s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.285031] env[61974]: DEBUG 
nova.objects.instance [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1036.297988] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379374, 'name': PowerOffVM_Task, 'duration_secs': 0.20187} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.298311] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.298492] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.298773] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20b49199-67ab-4cc8-85a8-070e7c443999 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.312138] env[61974]: INFO nova.scheduler.client.report [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance 912ff104-9c97-4486-99c8-71a35180abb0 [ 1036.320065] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.384824] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.385092] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.385296] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleting the datastore file [datastore2] 5780d1d6-cd40-4b97-8a68-072c090540af {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.385984] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af90c082-28f9-4a5e-b6c1-880d397cd2de {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.393780] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1036.393780] env[61974]: value = "task-1379379" [ 1036.393780] env[61974]: _type = "Task" [ 1036.393780] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.402892] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.519432] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.519432] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Instance network_info: |[{"id": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "address": "fa:16:3e:97:7b:81", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f49a7db-65", "ovs_interfaceid": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1036.519730] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:7b:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f49a7db-65cf-4082-b6f3-4f26169fe49a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.527352] env[61974]: DEBUG oslo.service.loopingcall [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.527610] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f475d963-0c09-4115-885a-04e28895df14] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.527848] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8e59968-9f3b-4aeb-a9f4-4d5ac0388b6f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.548036] env[61974]: DEBUG nova.compute.manager [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Received event network-vif-plugged-3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.548256] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Acquiring lock "f475d963-0c09-4115-885a-04e28895df14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.548382] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Lock "f475d963-0c09-4115-885a-04e28895df14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.548631] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Lock "f475d963-0c09-4115-885a-04e28895df14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.548822] env[61974]: DEBUG nova.compute.manager [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] No waiting events found dispatching network-vif-plugged-3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1036.549000] env[61974]: WARNING nova.compute.manager [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Received unexpected event network-vif-plugged-3f49a7db-65cf-4082-b6f3-4f26169fe49a for instance with vm_state building and task_state spawning. [ 1036.549191] env[61974]: DEBUG nova.compute.manager [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Received event network-changed-3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.549391] env[61974]: DEBUG nova.compute.manager [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Refreshing instance network info cache due to event network-changed-3f49a7db-65cf-4082-b6f3-4f26169fe49a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1036.549615] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Acquiring lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.549777] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Acquired lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.549942] env[61974]: DEBUG nova.network.neutron [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Refreshing network info cache for port 3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.559435] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.559435] env[61974]: value = "task-1379380" [ 1036.559435] env[61974]: _type = "Task" [ 1036.559435] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.570660] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379380, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.601149] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379375, 'name': ReconfigVM_Task, 'duration_secs': 0.386477} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.601458] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfigured VM instance instance-0000005e to attach disk [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.602217] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27707ae2-e85c-4b42-95bb-ac3e9e761557 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.609061] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1036.609061] env[61974]: value = "task-1379381" [ 1036.609061] env[61974]: _type = "Task" [ 1036.609061] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.617665] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379381, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.700844] env[61974]: DEBUG oslo_vmware.api [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227588} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.701079] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.701288] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.701497] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.701795] env[61974]: INFO nova.compute.manager [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1036.702102] env[61974]: DEBUG oslo.service.loopingcall [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.702333] env[61974]: DEBUG nova.compute.manager [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1036.702433] env[61974]: DEBUG nova.network.neutron [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.755143] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "refresh_cache-b1fa5433-8f26-48db-a19d-d1e11245fb44" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.756984] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1036.821234] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.824967] env[61974]: DEBUG oslo_concurrency.lockutils [None req-30dafbf5-eff8-41ae-9cbc-a3dc6cafde3b tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "912ff104-9c97-4486-99c8-71a35180abb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.881s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.905240] env[61974]: DEBUG oslo_vmware.api [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271791} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.905601] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.905807] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.906025] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.906237] env[61974]: INFO nova.compute.manager [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1036.906527] env[61974]: DEBUG oslo.service.loopingcall [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.907225] env[61974]: DEBUG nova.compute.manager [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1036.907399] env[61974]: DEBUG nova.network.neutron [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.071753] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379380, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.122589] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379381, 'name': Rename_Task, 'duration_secs': 0.166555} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.122949] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.123254] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2911ae16-9264-40e9-bc11-bd5d69ee23bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.131227] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1037.131227] env[61974]: value = "task-1379382" [ 1037.131227] env[61974]: _type = "Task" [ 1037.131227] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.141185] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.260924] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.266865] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ce35f2b-80fc-4eb6-826e-342d09942aa2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.278107] env[61974]: DEBUG oslo_vmware.api [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1037.278107] env[61974]: value = "task-1379383" [ 1037.278107] env[61974]: _type = "Task" [ 1037.278107] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.288609] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.292646] env[61974]: DEBUG oslo_vmware.api [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379383, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.298109] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac3649c7-f9c8-4eaf-a647-ff5e798d1cf7 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.299513] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.011s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.301168] env[61974]: INFO nova.compute.claims [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.321455] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.574800] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379380, 'name': CreateVM_Task, 'duration_secs': 0.580865} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.575154] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f475d963-0c09-4115-885a-04e28895df14] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.576116] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.576408] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.576910] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1037.577298] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25117e6-107c-4fe2-8b6d-176d875a6951 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.585156] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1037.585156] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528139cf-15f2-85b0-d6f2-626dd8b58c79" [ 1037.585156] env[61974]: _type = "Task" [ 1037.585156] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.597217] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528139cf-15f2-85b0-d6f2-626dd8b58c79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.666580] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379382, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.721022] env[61974]: DEBUG nova.network.neutron [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Updated VIF entry in instance network info cache for port 3f49a7db-65cf-4082-b6f3-4f26169fe49a. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.721454] env[61974]: DEBUG nova.network.neutron [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Updating instance_info_cache with network_info: [{"id": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "address": "fa:16:3e:97:7b:81", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f49a7db-65", "ovs_interfaceid": "3f49a7db-65cf-4082-b6f3-4f26169fe49a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.745776] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock 
"603bcf2a-fc99-4ba4-b757-c37d93554870" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.745955] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.746203] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.746437] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.746642] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.748963] env[61974]: INFO nova.compute.manager [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Terminating instance [ 1037.750888] env[61974]: DEBUG nova.compute.manager [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1037.751102] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.751942] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a80f0c0-5448-4faf-a0b4-1281824e951c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.765667] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.765968] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c711794b-8c9d-4f6b-b89a-2241e450ae80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.774508] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1037.774508] env[61974]: value = "task-1379384" [ 1037.774508] env[61974]: _type = "Task" [ 1037.774508] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.790324] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.794980] env[61974]: DEBUG oslo_vmware.api [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379383, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.819696] env[61974]: DEBUG oslo_vmware.api [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379368, 'name': ReconfigVM_Task, 'duration_secs': 5.965886} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.819696] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.819981] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Reconfigured VM to detach interface {{(pid=61974) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1038.098129] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528139cf-15f2-85b0-d6f2-626dd8b58c79, 'name': SearchDatastore_Task, 'duration_secs': 0.018613} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.100145] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.100145] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.100145] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.100145] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.100145] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.100145] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c33d5562-98b2-446e-bc59-4b6dbadb7b8e {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.110459] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.110830] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.111966] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c1d7e13-04c6-4725-b8dd-8d4f249baa41 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.118897] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1038.118897] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5222301d-1096-b90a-d904-d757496409b7" [ 1038.118897] env[61974]: _type = "Task" [ 1038.118897] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.128389] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5222301d-1096-b90a-d904-d757496409b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.141860] env[61974]: DEBUG oslo_vmware.api [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379382, 'name': PowerOnVM_Task, 'duration_secs': 0.546934} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.142169] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.142388] env[61974]: INFO nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Took 8.18 seconds to spawn the instance on the hypervisor. 
[ 1038.142577] env[61974]: DEBUG nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1038.143392] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d4c630-09be-4ea2-b681-42ad47ff11d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.215585] env[61974]: DEBUG nova.network.neutron [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.219360] env[61974]: DEBUG nova.network.neutron [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.224165] env[61974]: DEBUG oslo_concurrency.lockutils [req-68334985-8bed-4da8-83d1-e7bc5515a54e req-34d823ae-c018-445f-b5a4-4a82f69c407c service nova] Releasing lock "refresh_cache-f475d963-0c09-4115-885a-04e28895df14" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.289097] env[61974]: DEBUG oslo_vmware.api [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379383, 'name': PowerOnVM_Task, 'duration_secs': 0.514696} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.292274] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.292474] env[61974]: DEBUG nova.compute.manager [None req-d20cbd5b-240c-4646-8e8e-cd6672a6d30e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1038.292768] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379384, 'name': PowerOffVM_Task, 'duration_secs': 0.238654} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.293495] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9572b3-8e3c-487d-9fb3-879fedd24864 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.296035] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.296212] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1038.296849] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8b04260-c6c5-4aea-88df-af17e0acfefc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.487330] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db1c97b-f738-4fb2-85cb-6ba1c7de0ad1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.494482] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1038.494721] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1038.494886] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleting the datastore file [datastore2] 603bcf2a-fc99-4ba4-b757-c37d93554870 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.495169] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2765de59-1ca8-49e2-b2b7-4647adb424d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.500559] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77944060-cd7f-43e8-bf39-7fbeba7bdd76 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.504977] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1038.504977] env[61974]: value = "task-1379386" [ 1038.504977] env[61974]: _type = "Task" [ 1038.504977] env[61974]: } to 
complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.536391] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aaad4d4-86ea-447f-9d51-2f9541f3bdb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.542615] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.549307] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b71b52-e7da-4111-9a58-a631c79fb620 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.563689] env[61974]: DEBUG nova.compute.provider_tree [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.581123] env[61974]: DEBUG nova.compute.manager [req-fb4b1121-33a4-4862-ad69-534fdf7338d6 req-db8d4092-e66e-4d82-926a-d14f6c1b9960 service nova] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Received event network-vif-deleted-1dbdb496-b82f-458d-a43d-7575a27ca979 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.581498] env[61974]: DEBUG nova.compute.manager [req-fb4b1121-33a4-4862-ad69-534fdf7338d6 req-db8d4092-e66e-4d82-926a-d14f6c1b9960 service nova] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Received event network-vif-deleted-e3d592f0-8ee9-4b5c-9397-cf3da1294c61 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.631091] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5222301d-1096-b90a-d904-d757496409b7, 'name': SearchDatastore_Task, 'duration_secs': 0.022645} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.632387] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02cec698-510f-4504-aeb4-917e4a83b319 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.640060] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1038.640060] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52cb9397-eefb-8208-7544-d240fdfae315" [ 1038.640060] env[61974]: _type = "Task" [ 1038.640060] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.648788] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cb9397-eefb-8208-7544-d240fdfae315, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.661817] env[61974]: INFO nova.compute.manager [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Took 13.04 seconds to build instance. [ 1038.718393] env[61974]: INFO nova.compute.manager [-] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Took 1.81 seconds to deallocate network for instance. [ 1038.725733] env[61974]: INFO nova.compute.manager [-] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Took 2.02 seconds to deallocate network for instance. [ 1039.019154] env[61974]: DEBUG oslo_vmware.api [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416325} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.019512] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.019744] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1039.019996] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1039.020240] env[61974]: INFO nova.compute.manager [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1039.020667] env[61974]: DEBUG oslo.service.loopingcall [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.020809] env[61974]: DEBUG nova.compute.manager [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1039.020912] env[61974]: DEBUG nova.network.neutron [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1039.069075] env[61974]: DEBUG nova.scheduler.client.report [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.150793] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52cb9397-eefb-8208-7544-d240fdfae315, 'name': SearchDatastore_Task, 'duration_secs': 0.03295} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.151222] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.151562] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f475d963-0c09-4115-885a-04e28895df14/f475d963-0c09-4115-885a-04e28895df14.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1039.151983] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76053f1f-da71-41b6-be4a-061d2edfd358 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.164850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a8adc6d0-36d7-4c6f-8233-671df6c37a5b tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.552s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.165018] env[61974]: DEBUG oslo_vmware.api [None 
req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1039.165018] env[61974]: value = "task-1379387" [ 1039.165018] env[61974]: _type = "Task" [ 1039.165018] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.174372] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379387, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.227842] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.233781] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.264596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.264596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquired lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.264596] env[61974]: DEBUG nova.network.neutron [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.577821] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.577821] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1039.580617] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.353s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.580893] env[61974]: DEBUG nova.objects.instance [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lazy-loading 'resources' on Instance uuid 5780d1d6-cd40-4b97-8a68-072c090540af {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.684587] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379387, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.710474] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.710822] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.711071] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.711278] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.711455] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.714205] env[61974]: 
INFO nova.compute.manager [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Terminating instance [ 1039.717073] env[61974]: DEBUG nova.compute.manager [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1039.717252] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.718411] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2f6c9b-43e9-4e1d-b923-7c731ae69fe1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.731585] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.732184] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f6776ad-7d4d-4fa8-95bb-4a21ab2158eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.744867] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1039.744867] env[61974]: value = "task-1379388" [ 1039.744867] env[61974]: _type = "Task" [ 1039.744867] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.759420] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.835153] env[61974]: DEBUG nova.network.neutron [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.084318] env[61974]: DEBUG nova.compute.utils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1040.087148] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1040.087425] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1040.178546] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379387, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.843421} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.178985] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f475d963-0c09-4115-885a-04e28895df14/f475d963-0c09-4115-885a-04e28895df14.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1040.179102] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1040.179349] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cde5de90-bfac-41e4-ae97-9bb2bfde2c08 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.186597] env[61974]: DEBUG nova.policy [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c5f6d80a0784b1f8ff2b2fcfbb44232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40e43abf62a5464091aa725e1cff2b50', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1040.190052] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1040.190052] env[61974]: value = "task-1379389" [ 1040.190052] env[61974]: _type = "Task" [ 1040.190052] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.201127] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379389, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.233941] env[61974]: INFO nova.network.neutron [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Port 915eddb2-5b76-46da-8c84-a99ed89ca777 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1040.236039] env[61974]: DEBUG nova.network.neutron [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [{"id": "a2b829bf-e2cb-41c7-a840-499beb350683", "address": "fa:16:3e:4a:5e:1d", "network": {"id": "a3cca736-a69c-4d05-a3fd-386cf3c4bee5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1390694517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "102785ae1c584cdb925a55afc3412fb9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2b829bf-e2", "ovs_interfaceid": "a2b829bf-e2cb-41c7-a840-499beb350683", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.262512] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379388, 'name': PowerOffVM_Task, 'duration_secs': 0.390703} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.266666] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.266901] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.269165] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4591456-99a8-4290-9932-f30b39fbb699 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.338868] env[61974]: INFO nova.compute.manager [-] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Took 1.32 seconds to deallocate network for instance. [ 1040.343982] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b11184a-576b-43ab-a4cf-5ee61fc2138c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.359730] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6f2698-192b-4a70-af20-dc07eeaf9dd4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.397176] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e6121c-57bb-4012-a889-82f7951ece38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.403012] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.403923] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.404207] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleting the datastore file [datastore1] 9c26e20b-dfc4-432c-a851-499dbea18f01 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.405051] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be44b471-2ef8-48f7-9119-8b5717a27a2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.412428] env[61974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bc6437-c8f4-42ac-8f1d-0515c07842cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.418396] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1040.418396] env[61974]: value = "task-1379391" [ 1040.418396] env[61974]: _type = "Task" [ 1040.418396] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.434368] env[61974]: DEBUG nova.compute.provider_tree [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.442949] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379391, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.595011] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1040.620594] env[61974]: DEBUG nova.compute.manager [req-07973aaa-3290-4e54-b74d-0916c9d0ae76 req-2998aa9d-2361-4b1e-9e25-9acfcfbfae18 service nova] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Received event network-vif-deleted-1d8be3ef-cc07-4962-8443-8b4f3bce14ce {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1040.643514] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Successfully created port: 3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.702108] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093976} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.702379] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.703442] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680bd8e5-9cae-4b18-a8ec-6b719419ac79 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.728463] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] f475d963-0c09-4115-885a-04e28895df14/f475d963-0c09-4115-885a-04e28895df14.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.728816] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96d0799a-8d1a-4c24-9ebd-6ffe0060a312 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.744463] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Releasing lock "refresh_cache-9c26e20b-dfc4-432c-a851-499dbea18f01" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.755722] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1040.755722] env[61974]: value = "task-1379392" [ 1040.755722] env[61974]: _type = "Task" [ 1040.755722] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.767354] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.832077] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk. 
{{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1040.834794] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c575ff16-35f6-4865-9caf-9b2fc5c80b1e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.842068] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk is in state: ready. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1040.842305] env[61974]: ERROR oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk due to incomplete transfer. [ 1040.843391] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f031a293-e23e-4e33-ba81-e18b45c94fdb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.851405] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.855156] env[61974]: DEBUG oslo_vmware.rw_handles [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52938b71-7326-a83f-a33a-4640ea0401ee/disk-0.vmdk. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1040.855425] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Uploaded image 7e8de138-c44e-4516-9083-c48e99a4114f to the Glance image server {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1040.857946] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Destroying the VM {{(pid=61974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1040.858239] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e6dce058-aaa1-43bd-b84d-755c14bea287 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.867227] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1040.867227] env[61974]: value = "task-1379393" [ 1040.867227] env[61974]: _type = "Task" [ 1040.867227] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.877008] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379393, 'name': Destroy_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.930356] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379391, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.937538] env[61974]: DEBUG nova.scheduler.client.report [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.249023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2bcd1d59-eff0-458b-a03c-3bae3abd1583 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "interface-9c26e20b-dfc4-432c-a851-499dbea18f01-915eddb2-5b76-46da-8c84-a99ed89ca777" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.032s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.267747] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379392, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.322432] env[61974]: DEBUG nova.compute.manager [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Stashing vm_state: active {{(pid=61974) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1041.377229] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379393, 'name': Destroy_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.430439] env[61974]: DEBUG oslo_vmware.api [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379391, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.631095} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.431134] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.431491] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.431792] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.432039] env[61974]: INFO nova.compute.manager [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1041.432342] env[61974]: DEBUG oslo.service.loopingcall [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.432587] env[61974]: DEBUG nova.compute.manager [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1041.432710] env[61974]: DEBUG nova.network.neutron [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.442247] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.446085] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.212s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.446085] env[61974]: DEBUG nova.objects.instance [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'resources' on Instance uuid a2fbbc4a-92da-4917-a73e-a37a8980c62c {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.467010] env[61974]: INFO nova.scheduler.client.report [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted allocations for instance 5780d1d6-cd40-4b97-8a68-072c090540af [ 1041.605599] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1041.638275] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1041.638673] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.638781] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1041.638953] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.639799] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1041.639799] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1041.640166] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1041.640287] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1041.640538] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Got 1 
possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1041.640746] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1041.640947] env[61974]: DEBUG nova.virt.hardware [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1041.641914] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8117e55-bafd-48fe-8ee6-58ced1504847 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.666990] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e23c84f-e4d9-491b-b04d-7019a0675a03 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.767324] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379392, 'name': ReconfigVM_Task, 'duration_secs': 0.953773} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.767623] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Reconfigured VM instance instance-0000005f to attach disk [datastore2] f475d963-0c09-4115-885a-04e28895df14/f475d963-0c09-4115-885a-04e28895df14.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.768313] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c909877b-0105-47f4-b316-663e9de63758 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.774970] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1041.774970] env[61974]: value = "task-1379394" [ 1041.774970] env[61974]: _type = "Task" [ 1041.774970] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.783661] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379394, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.841570] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.878416] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379393, 'name': Destroy_Task, 'duration_secs': 0.874199} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.878638] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Destroyed the VM [ 1041.878893] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleting Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1041.879200] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-521f565d-f2b8-46c1-ab25-3bde6acf5fff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.888595] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1041.888595] env[61974]: value = "task-1379395" [ 1041.888595] env[61974]: _type = "Task" [ 1041.888595] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.898458] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379395, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.974216] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f5a2822-6a16-4f2a-92c5-3ea19ef6d252 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "5780d1d6-cd40-4b97-8a68-072c090540af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.220s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.143887] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01564f4-12cf-4946-9ba2-6c7eaee3208e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.153717] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acfcff1-318b-4eef-8478-28cb2b466244 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.195755] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1500d3-3327-416f-9868-312190f21ffd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.208295] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73b0ad5-31e4-40cd-929b-46682af950e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.226329] env[61974]: DEBUG nova.compute.provider_tree [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.286504] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379394, 'name': Rename_Task, 'duration_secs': 0.159255} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.286809] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.287079] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8eeac90f-cae7-4426-9ece-324b37582238 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.294529] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1042.294529] env[61974]: value = "task-1379396" [ 1042.294529] env[61974]: _type = "Task" [ 1042.294529] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.303687] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379396, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.399231] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379395, 'name': RemoveSnapshot_Task, 'duration_secs': 0.491855} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.399618] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleted Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1042.399934] env[61974]: DEBUG nova.compute.manager [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.400748] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ecc8fa-6385-4f7d-bbb2-58dd2bf3e77d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.532978] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Successfully updated port: 3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.650248] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Received event network-vif-plugged-3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.650699] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] Acquiring lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.650964] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.651322] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] 
Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.651574] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] No waiting events found dispatching network-vif-plugged-3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1042.651840] env[61974]: WARNING nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Received unexpected event network-vif-plugged-3ccab2a0-7919-4a4a-953b-0abac563e24a for instance with vm_state building and task_state spawning. [ 1042.652162] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Received event network-vif-deleted-a2b829bf-e2cb-41c7-a840-499beb350683 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1042.653290] env[61974]: INFO nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Neutron deleted interface a2b829bf-e2cb-41c7-a840-499beb350683; detaching it from the instance and deleting it from the info cache [ 1042.653290] env[61974]: DEBUG nova.network.neutron [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.703261] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.703570] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.703889] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.704115] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 
tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.704308] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.707982] env[61974]: INFO nova.compute.manager [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Terminating instance [ 1042.710137] env[61974]: DEBUG nova.compute.manager [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1042.710408] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.711284] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3f8a7a-9ff1-4420-8448-c26e8d743bd3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.722160] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.722490] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c4514f3-a802-4572-9146-da4ac8d7a736 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.729474] env[61974]: DEBUG nova.scheduler.client.report [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.738451] env[61974]: DEBUG oslo_vmware.api [None 
req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1042.738451] env[61974]: value = "task-1379397" [ 1042.738451] env[61974]: _type = "Task" [ 1042.738451] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.747788] env[61974]: DEBUG oslo_vmware.api [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379397, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.806949] env[61974]: DEBUG oslo_vmware.api [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379396, 'name': PowerOnVM_Task, 'duration_secs': 0.476869} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.807440] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.807999] env[61974]: INFO nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Took 8.79 seconds to spawn the instance on the hypervisor. 
[ 1042.808612] env[61974]: DEBUG nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.809220] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a12866d-550f-4e84-84ac-a04dcf9aed2a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.914825] env[61974]: INFO nova.compute.manager [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Shelve offloading [ 1042.916718] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.916976] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-286f7e39-50e9-4c9d-be11-51a844b99193 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.924509] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1042.924509] env[61974]: value = "task-1379398" [ 1042.924509] env[61974]: _type = "Task" [ 1042.924509] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.934685] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379398, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.030740] env[61974]: DEBUG nova.network.neutron [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.035782] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.035912] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.036070] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.156869] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b61e839-c275-43eb-a7a0-125a2f904142 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.169536] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57865a2d-e3fa-46aa-8482-f1b4efc8bcdf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.202998] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Detach interface failed, port_id=a2b829bf-e2cb-41c7-a840-499beb350683, reason: Instance 9c26e20b-dfc4-432c-a851-499dbea18f01 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1043.203285] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Received event network-changed-3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.203540] env[61974]: DEBUG nova.compute.manager [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Refreshing instance network info cache due to event network-changed-3ccab2a0-7919-4a4a-953b-0abac563e24a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.203661] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] Acquiring lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.236781] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.239599] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.388s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.239854] env[61974]: DEBUG nova.objects.instance [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lazy-loading 'resources' on Instance uuid 603bcf2a-fc99-4ba4-b757-c37d93554870 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.251017] env[61974]: DEBUG oslo_vmware.api [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379397, 'name': PowerOffVM_Task, 'duration_secs': 0.277602} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.251282] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.251455] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.251712] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31f817ec-c795-46d2-a58d-447f9ebd5dcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.260091] env[61974]: INFO nova.scheduler.client.report [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocations for instance a2fbbc4a-92da-4917-a73e-a37a8980c62c [ 1043.329959] env[61974]: INFO nova.compute.manager [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Took 13.74 seconds to build instance. 
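Editor's note: the repeated 'Lock "compute_resources" acquired ... waited N.NNNs' and '"released" ... held N.NNNs' DEBUG lines above are emitted by oslo.concurrency's lockutils wrapper (the "inner" frames in lockutils.py) around the resource tracker's critical sections. A minimal sketch of that pattern follows; the function name, arguments, and lock names are illustrative placeholders, not Nova's exact code.

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"

@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def update_usage(context, instance, nodename):
    # While the decorated function runs, the named lock is held; lockutils
    # logs 'acquired ... waited N.NNNs' on entry and '"released" ... held
    # N.NNNs' on exit, which is where the timings in the log lines above
    # come from.
    pass

# The same module also exposes an explicit context manager, which matches
# the Acquiring/Acquired/Releasing lines for the per-instance cache locks:
with lockutils.lock("refresh_cache-<instance-uuid>"):
    # refresh the instance network info cache under the lock
    pass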
[ 1043.396779] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.397012] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.397210] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleting the datastore file [datastore1] 1a04b388-8739-4b46-a8e1-cd79835bcf48 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.397538] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-712640f5-f9f6-471c-9739-82cd9c7401bf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.408689] env[61974]: DEBUG oslo_vmware.api [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1043.408689] env[61974]: value = "task-1379400" [ 1043.408689] env[61974]: _type = "Task" [ 1043.408689] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.418493] env[61974]: DEBUG oslo_vmware.api [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.435936] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] VM already powered off {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1043.435936] env[61974]: DEBUG nova.compute.manager [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1043.436673] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a49f0d-e357-4c90-bf42-56eaa4813196 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.443236] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.443419] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.443680] env[61974]: DEBUG nova.network.neutron [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.533533] env[61974]: INFO nova.compute.manager [-] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Took 2.10 seconds to deallocate network for instance. [ 1043.580475] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1043.716728] env[61974]: DEBUG nova.network.neutron [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Updating instance_info_cache with network_info: [{"id": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "address": "fa:16:3e:de:a4:2c", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ccab2a0-79", "ovs_interfaceid": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.771259] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8f3da1df-2c5f-40ae-bfc0-09f426ac81c5 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "a2fbbc4a-92da-4917-a73e-a37a8980c62c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.708s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.831855] env[61974]: DEBUG oslo_concurrency.lockutils [None req-02277221-1edd-43d5-ad73-006895ed538d tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.252s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.919238] env[61974]: DEBUG oslo_vmware.api [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258857} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.920650] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.920912] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1043.921224] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1043.921488] env[61974]: INFO nova.compute.manager [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1043.921806] env[61974]: DEBUG oslo.service.loopingcall [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1043.922538] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289c088b-2b11-4a08-b981-e92bc6acdc19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.926300] env[61974]: DEBUG nova.compute.manager [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1043.926420] env[61974]: DEBUG nova.network.neutron [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1043.934627] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea91abf6-f215-481f-ad2e-865500f4786e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.972337] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a3f79a-6c8d-4222-8e6f-f1bc465f8248 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.981760] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7914cd-450c-4df3-9b40-b374a7f2feaf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.997713] env[61974]: DEBUG nova.compute.provider_tree [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.041938] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.219908] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.220261] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Instance network_info: |[{"id": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "address": "fa:16:3e:de:a4:2c", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ccab2a0-79", "ovs_interfaceid": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1044.220663] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] Acquired lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.220854] env[61974]: DEBUG nova.network.neutron [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Refreshing network info cache for port 3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1044.225021] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:a4:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '171aeae0-6a27-44fc-bc3d-a2d5581fc702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ccab2a0-7919-4a4a-953b-0abac563e24a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.230488] env[61974]: DEBUG oslo.service.loopingcall [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.235186] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.235901] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a1b23d2-3c99-4a8a-8dee-b7a428aa11b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.258135] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.258135] env[61974]: value = "task-1379401" [ 1044.258135] env[61974]: _type = "Task" [ 1044.258135] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.267235] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379401, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.503494] env[61974]: DEBUG nova.scheduler.client.report [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.533432] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.533770] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.594340] env[61974]: DEBUG nova.network.neutron [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.668715] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "f475d963-0c09-4115-885a-04e28895df14" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.669115] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.669350] env[61974]: DEBUG nova.compute.manager [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1044.670286] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8533bbd4-3407-448b-bdd9-38fc64f2cae7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.677482] env[61974]: DEBUG nova.compute.manager [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1044.678115] env[61974]: DEBUG nova.objects.instance [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'flavor' on Instance uuid f475d963-0c09-4115-885a-04e28895df14 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.769503] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379401, 'name': CreateVM_Task, 'duration_secs': 0.353467} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.769734] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.770453] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.770633] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.770965] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.771256] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa9b0098-e6db-452d-9547-185999e2fb1d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.774227] env[61974]: DEBUG nova.network.neutron [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Updated VIF entry in instance network info cache for port 3ccab2a0-7919-4a4a-953b-0abac563e24a. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.774227] env[61974]: DEBUG nova.network.neutron [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Updating instance_info_cache with network_info: [{"id": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "address": "fa:16:3e:de:a4:2c", "network": {"id": "870e2179-016d-4c2a-8dff-b56143e5db7b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1966833648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40e43abf62a5464091aa725e1cff2b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "171aeae0-6a27-44fc-bc3d-a2d5581fc702", "external-id": "nsx-vlan-transportzone-410", "segmentation_id": 410, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ccab2a0-79", "ovs_interfaceid": "3ccab2a0-7919-4a4a-953b-0abac563e24a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.780277] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1044.780277] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52554a62-87db-e397-48ed-eaf4c28efe79" [ 1044.780277] env[61974]: _type = "Task" [ 1044.780277] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.790390] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52554a62-87db-e397-48ed-eaf4c28efe79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.975808] env[61974]: DEBUG nova.compute.manager [req-9151f4df-4c79-48b1-8e0b-ad39d680f869 req-4dce24a3-e0ca-4bd1-bd10-65941c8fb055 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Received event network-vif-deleted-f32df777-3ba7-47f1-9845-8327f4f53fe8 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.976045] env[61974]: INFO nova.compute.manager [req-9151f4df-4c79-48b1-8e0b-ad39d680f869 req-4dce24a3-e0ca-4bd1-bd10-65941c8fb055 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Neutron deleted interface f32df777-3ba7-47f1-9845-8327f4f53fe8; detaching it from the instance and deleting it from the info cache [ 1044.976234] env[61974]: DEBUG nova.network.neutron [req-9151f4df-4c79-48b1-8e0b-ad39d680f869 req-4dce24a3-e0ca-4bd1-bd10-65941c8fb055 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.008826] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.011909] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.170s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.035384] env[61974]: INFO nova.scheduler.client.report [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance 603bcf2a-fc99-4ba4-b757-c37d93554870 [ 1045.043138] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1045.043388] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1045.043514] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1045.097105] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.183167] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.183413] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8851f52-c954-4040-a211-ac9f693be409 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.192020] env[61974]: DEBUG oslo_vmware.api [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1045.192020] env[61974]: value = "task-1379402" [ 1045.192020] env[61974]: _type = "Task" [ 1045.192020] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.201932] env[61974]: DEBUG oslo_vmware.api [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.278415] env[61974]: DEBUG oslo_concurrency.lockutils [req-7e722a80-eed6-4efb-8e6f-539d99a5d4f5 req-54d71c75-3bc3-4222-8070-2693f306b6c3 service nova] Releasing lock "refresh_cache-e3dc39a5-4e90-472d-8b62-fd17572852f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.294985] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52554a62-87db-e397-48ed-eaf4c28efe79, 'name': SearchDatastore_Task, 'duration_secs': 0.011065} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.294985] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.295243] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.295640] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.295734] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.295918] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.296248] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b5c3214-5d13-4365-b6e2-fd0dcc6fa169 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.310326] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.310532] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.311361] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d939e9b-3988-492e-ad7a-cfe3a25c19ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.318578] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1045.318578] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]524adacd-91dc-9a52-8353-4e610dc0396a" [ 1045.318578] env[61974]: _type = "Task" [ 1045.318578] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.328983] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524adacd-91dc-9a52-8353-4e610dc0396a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.450265] env[61974]: DEBUG nova.network.neutron [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.478907] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5ddd8af-ba41-4dd6-82a5-10db515df664 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.490305] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15172cfe-c544-4e84-b03d-4afec66f1ad8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.519873] env[61974]: INFO nova.compute.claims [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.535735] env[61974]: DEBUG nova.compute.manager [req-9151f4df-4c79-48b1-8e0b-ad39d680f869 req-4dce24a3-e0ca-4bd1-bd10-65941c8fb055 service nova] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Detach interface failed, port_id=f32df777-3ba7-47f1-9845-8327f4f53fe8, reason: Instance 1a04b388-8739-4b46-a8e1-cd79835bcf48 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1045.545954] env[61974]: DEBUG oslo_concurrency.lockutils [None req-91c56241-d414-403f-a51f-ed04fa85319a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "603bcf2a-fc99-4ba4-b757-c37d93554870" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.800s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.550033] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Skipping network cache update for instance because it is being deleted. 
{{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1045.550407] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Skipping network cache update for instance because it is Building. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1045.596892] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.596892] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f8a07b-ffd6-4b56-8b01-8769e5ce8a70 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.605114] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1045.607013] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cf24189-7dd4-4e78-ae68-1c8b9e2e1e46 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.618016] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.618016] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquired lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.618016] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Forcefully refreshing network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1045.618198] env[61974]: DEBUG nova.objects.instance [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lazy-loading 'info_cache' on Instance uuid 1c1404fd-a954-4849-883b-7898a7e87e2b {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.687138] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.687405] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} 
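Editor's note: the destroy sequence traced above (PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task before the network is deallocated) follows oslo.vmware's invoke/wait pattern. Below is a hedged sketch of that call sequence only; the vCenter host, credentials, vm_ref, dc_ref, and datastore path are placeholders, and Nova's actual vmops/ds_util helpers wrap these calls differently.

from oslo_vmware import api

# Session setup mirrors the VMwareAPISession created at the start of the
# log; host and credentials here are placeholders.
session = api.VMwareAPISession("vc.example.org", "user", "secret",
                               api_retry_count=10, task_poll_interval=0.5)

def destroy_vm(session, vm_ref, dc_ref, ds_path):
    vim = session.vim
    # 1. Power off the VM (skipped when it is already off, as the
    #    "VM already powered off" line above shows).
    task = session.invoke_api(vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)
    # 2. Remove the VM from the vCenter inventory.
    session.invoke_api(vim, "UnregisterVM", vm_ref)
    # 3. Delete the instance directory from the datastore, e.g.
    #    "[datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb".
    file_manager = vim.service_content.fileManager
    task = session.invoke_api(vim, "DeleteDatastoreFile_Task", file_manager,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)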
[ 1045.687577] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleting the datastore file [datastore2] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.687880] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a276618-dd07-4ccd-8a3d-c6dd44efb1e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.703453] env[61974]: DEBUG oslo_vmware.api [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379402, 'name': PowerOffVM_Task, 'duration_secs': 0.2134} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.705553] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.705553] env[61974]: DEBUG nova.compute.manager [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1045.709025] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1045.709025] env[61974]: value = "task-1379404" [ 1045.709025] env[61974]: _type = "Task" [ 1045.709025] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.709025] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b960a6-f6d1-414b-8a45-6955228e55f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.726703] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379404, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.832777] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524adacd-91dc-9a52-8353-4e610dc0396a, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.833651] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2bc61b-22d3-4f3e-b3d1-6255263c4969 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.841651] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1045.841651] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5278a68c-a6bf-0029-e4a2-468ce042c587" [ 1045.841651] env[61974]: _type = "Task" [ 1045.841651] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.854959] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5278a68c-a6bf-0029-e4a2-468ce042c587, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.954508] env[61974]: INFO nova.compute.manager [-] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Took 2.03 seconds to deallocate network for instance. [ 1046.038901] env[61974]: INFO nova.compute.resource_tracker [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating resource usage from migration 897477ea-f863-4278-ac49-cbd1a2013020 [ 1046.226242] env[61974]: DEBUG oslo_vmware.api [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193217} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.226242] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.226242] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.226242] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.235129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c99f34e4-d57f-4cd2-b091-113f88c04a79 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.564s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.251779] env[61974]: INFO nova.scheduler.client.report [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted allocations for instance 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb [ 1046.275701] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaa3f0b-ef25-473f-98ee-c9372bfe9e36 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.284565] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e757a50-70b4-47fa-876a-1e54638ee273 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.317402] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64c3061-8835-42f4-8fdb-068a4373c7fd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.325846] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22cf7e5-a8ad-43b4-8c73-58098f28681d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.343229] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.343491] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c 
tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.345012] env[61974]: DEBUG nova.compute.provider_tree [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.356268] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5278a68c-a6bf-0029-e4a2-468ce042c587, 'name': SearchDatastore_Task, 'duration_secs': 0.011562} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.356519] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.356773] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] e3dc39a5-4e90-472d-8b62-fd17572852f7/e3dc39a5-4e90-472d-8b62-fd17572852f7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.357255] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e0e1aa3-98dc-4c64-bea6-81fdfc1fc041 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.365328] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1046.365328] env[61974]: value = "task-1379405" [ 1046.365328] env[61974]: _type = "Task" [ 1046.365328] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.373483] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379405, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.461366] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.635060] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.635425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.757033] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.847712] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1046.850867] env[61974]: DEBUG nova.scheduler.client.report [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1046.876314] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379405, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499225} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.876609] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] e3dc39a5-4e90-472d-8b62-fd17572852f7/e3dc39a5-4e90-472d-8b62-fd17572852f7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.876851] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.877181] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b945ccf5-718e-4148-8630-e803a3c2fab4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.885650] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1046.885650] env[61974]: value = "task-1379406" [ 1046.885650] env[61974]: _type = "Task" [ 1046.885650] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.896478] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379406, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.034757] env[61974]: DEBUG nova.compute.manager [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-vif-unplugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.035008] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.035248] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.035394] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.035562] env[61974]: DEBUG nova.compute.manager [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] No waiting events found dispatching network-vif-unplugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1047.035735] env[61974]: WARNING nova.compute.manager [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received unexpected event network-vif-unplugged-e377f334-8d36-4f17-8532-abbd37c47eba for instance with vm_state shelved_offloaded and task_state None. [ 1047.035896] env[61974]: DEBUG nova.compute.manager [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.036113] env[61974]: DEBUG nova.compute.manager [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing instance network info cache due to event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1047.036245] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.036384] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.036546] env[61974]: DEBUG nova.network.neutron [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.137980] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1047.357564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.346s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.357821] env[61974]: INFO nova.compute.manager [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Migrating [ 1047.367271] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.326s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.367557] env[61974]: DEBUG nova.objects.instance [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'resources' on Instance uuid 9c26e20b-dfc4-432c-a851-499dbea18f01 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.371019] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating instance_info_cache with network_info: [{"id": "f76d592c-5eee-4379-b971-9896eb2bb538", "address": "fa:16:3e:d0:c9:51", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76d592c-5e", "ovs_interfaceid": "f76d592c-5eee-4379-b971-9896eb2bb538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.386903] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.398817] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078741} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.399113] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.400021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f410a6-10cc-49f7-b30c-f8be5092bacc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.402685] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "f475d963-0c09-4115-885a-04e28895df14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.402911] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.403127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "f475d963-0c09-4115-885a-04e28895df14-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.403319] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.403501] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.405755] env[61974]: INFO nova.compute.manager [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Terminating instance [ 1047.407775] env[61974]: DEBUG nova.compute.manager [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1047.408016] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1047.408789] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975f6613-c1ff-4a48-83f5-575e55bcf689 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.429738] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] e3dc39a5-4e90-472d-8b62-fd17572852f7/e3dc39a5-4e90-472d-8b62-fd17572852f7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.430513] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dc570df-e876-4670-9963-b08ed593156f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.447524] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1047.448252] env[61974]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0e9da5a-6e5b-440d-bd20-a934508e2269 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.455096] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1047.455096] env[61974]: value = "task-1379407" [ 1047.455096] env[61974]: _type = "Task" [ 1047.455096] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.462518] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.525705] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1047.525944] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1047.526147] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleting the datastore file [datastore2] f475d963-0c09-4115-885a-04e28895df14 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1047.526424] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6301351-af49-474a-880b-9e28281e71cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.533404] env[61974]: DEBUG oslo_vmware.api [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1047.533404] env[61974]: value = "task-1379409" [ 1047.533404] env[61974]: _type = "Task" [ 1047.533404] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.543718] env[61974]: DEBUG oslo_vmware.api [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.657258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.812932] env[61974]: DEBUG nova.network.neutron [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updated VIF entry in instance network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.812932] env[61974]: DEBUG nova.network.neutron [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape377f334-8d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.882543] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Releasing lock "refresh_cache-1c1404fd-a954-4849-883b-7898a7e87e2b" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.882737] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updated the network info_cache for instance {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1047.883328] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.883489] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.883718] 
env[61974]: DEBUG nova.network.neutron [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.885042] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.885184] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.885597] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.885828] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.886064] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.886220] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.886337] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1047.886482] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.964849] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379407, 'name': ReconfigVM_Task, 'duration_secs': 0.420602} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.965157] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Reconfigured VM instance instance-00000060 to attach disk [datastore1] e3dc39a5-4e90-472d-8b62-fd17572852f7/e3dc39a5-4e90-472d-8b62-fd17572852f7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.965810] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-758b79e8-5050-4dfb-8910-75fafbcb4265 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.977128] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1047.977128] env[61974]: value = "task-1379410" [ 1047.977128] env[61974]: _type = "Task" [ 1047.977128] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.988399] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379410, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.039518] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d43c85-a92e-4636-aadc-7de9a5dc9526 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.050579] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d40a982-9426-4a7b-8ba0-9c22eeddb6be {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.053649] env[61974]: DEBUG oslo_vmware.api [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148197} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.053912] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1048.054133] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1048.054322] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1048.054533] env[61974]: INFO nova.compute.manager [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: f475d963-0c09-4115-885a-04e28895df14] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1048.054794] env[61974]: DEBUG oslo.service.loopingcall [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1048.055332] env[61974]: DEBUG nova.compute.manager [-] [instance: f475d963-0c09-4115-885a-04e28895df14] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1048.055452] env[61974]: DEBUG nova.network.neutron [-] [instance: f475d963-0c09-4115-885a-04e28895df14] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1048.083714] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8ec3fa-85aa-4b2c-9af9-190f2c334e3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.093584] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fd422d-13d1-4cad-b0b1-5414f595a470 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.106812] env[61974]: DEBUG nova.compute.provider_tree [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.314808] env[61974]: DEBUG oslo_concurrency.lockutils [req-12bb0349-431c-4c21-9619-13fec53a8e6d req-6161f080-6a14-4d79-bbb6-11adf64d9d20 service nova] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.390078] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.489148] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379410, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.574865] env[61974]: DEBUG nova.network.neutron [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.610128] env[61974]: DEBUG nova.scheduler.client.report [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1048.859295] env[61974]: DEBUG nova.network.neutron [-] [instance: f475d963-0c09-4115-885a-04e28895df14] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.867441] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 
tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.988517] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379410, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.059164] env[61974]: DEBUG nova.compute.manager [req-bb487e8d-9b5b-43c5-a043-21bac6528385 req-b9b92c53-f574-470f-a4d1-1e21ea000f76 service nova] [instance: f475d963-0c09-4115-885a-04e28895df14] Received event network-vif-deleted-3f49a7db-65cf-4082-b6f3-4f26169fe49a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.077134] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.114526] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.747s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.116836] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.656s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.117082] env[61974]: DEBUG nova.objects.instance [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'resources' on Instance uuid 1a04b388-8739-4b46-a8e1-cd79835bcf48 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.138208] env[61974]: INFO nova.scheduler.client.report [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted allocations for instance 9c26e20b-dfc4-432c-a851-499dbea18f01 [ 1049.363064] env[61974]: INFO nova.compute.manager [-] [instance: f475d963-0c09-4115-885a-04e28895df14] Took 1.31 seconds to deallocate network for instance. [ 1049.488079] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379410, 'name': Rename_Task, 'duration_secs': 1.154996} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.488351] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.488592] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9452fd20-9dc2-49c8-85c3-96fa939156e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.495245] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1049.495245] env[61974]: value = "task-1379411" [ 1049.495245] env[61974]: _type = "Task" [ 1049.495245] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.502989] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379411, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.645851] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a931f2ef-203f-4310-9072-dd45cf159bbe tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "9c26e20b-dfc4-432c-a851-499dbea18f01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.935s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.760382] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608315d2-0abc-47c7-a43a-e7b0a62d84c2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.768216] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0034082d-828d-4d33-9428-373f59b0423c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.798299] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3bf1a6-a5c3-4f03-b6d3-c4324def12e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.806097] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69aecffc-2664-480f-9649-19b57bf350f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.819857] env[61974]: DEBUG nova.compute.provider_tree [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.869027] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.889908] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.890175] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.890360] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.890555] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.890734] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.893130] env[61974]: INFO nova.compute.manager [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Terminating instance [ 1049.895093] env[61974]: DEBUG nova.compute.manager [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1049.895301] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.896149] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e79c5cd-4606-4adb-b54b-a3d69fac04c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.905232] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.905543] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a96e946f-0905-4543-8fde-b7e49f25da89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.913461] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1049.913461] env[61974]: value = "task-1379412" [ 1049.913461] env[61974]: _type = "Task" [ 1049.913461] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.922473] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.006230] env[61974]: DEBUG oslo_vmware.api [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379411, 'name': PowerOnVM_Task, 'duration_secs': 0.418288} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.006530] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.006783] env[61974]: INFO nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Took 8.40 seconds to spawn the instance on the hypervisor. 
[ 1050.007015] env[61974]: DEBUG nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1050.007868] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c347ad-e91b-4616-bd97-f379e8646368 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.323121] env[61974]: DEBUG nova.scheduler.client.report [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1050.424824] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379412, 'name': PowerOffVM_Task, 'duration_secs': 0.248119} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.425085] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.425264] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.425517] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8489b98d-d728-425a-8df4-b0c874ddd133 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.492682] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.492908] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.493159] env[61974]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleting the datastore file [datastore1] ceb0dd02-6441-4923-99f6-73f8eab86fe5 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.493543] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-569a10d2-c485-4dfb-85ce-547985704efc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.499828] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for the task: (returnval){ [ 1050.499828] env[61974]: value = "task-1379414" [ 1050.499828] env[61974]: _type = "Task" [ 1050.499828] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.507727] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379414, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.525273] env[61974]: INFO nova.compute.manager [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Took 13.26 seconds to build instance. [ 1050.592176] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463615ba-d874-40ee-844d-1b03e1599f36 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.612828] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 0 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1050.829022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.831858] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.075s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.831858] env[61974]: DEBUG nova.objects.instance [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'resources' on Instance uuid 
1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.848921] env[61974]: INFO nova.scheduler.client.report [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted allocations for instance 1a04b388-8739-4b46-a8e1-cd79835bcf48 [ 1051.012090] env[61974]: DEBUG oslo_vmware.api [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Task: {'id': task-1379414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165891} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.012429] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.012598] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.012780] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.012961] env[61974]: INFO nova.compute.manager [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1051.013233] env[61974]: DEBUG oslo.service.loopingcall [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
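Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" entry above (logged from oslo_service/loopingcall.py) is the retry wrapper Nova places around network deallocation so transient Neutron failures are retried instead of failing the teardown. A minimal sketch of that pattern follows; the retry counts, sleep times and exception type are illustrative, not Nova's exact values.

# Hedged sketch, not Nova's code: reproduce the RetryDecorator pattern
# behind the "Waiting for function ... to return" log line above.
from oslo_service import loopingcall


class TransientNeutronFailure(Exception):
    """Stand-in for a connection error that should be retried."""


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(TransientNeutronFailure,))
def _deallocate_network_with_retries():
    # Real code would call Neutron to unbind and delete the instance's
    # ports; raising TransientNeutronFailure here would make the
    # decorator sleep and invoke the function again.
    return None


_deallocate_network_with_retries()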
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.013438] env[61974]: DEBUG nova.compute.manager [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1051.013536] env[61974]: DEBUG nova.network.neutron [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.027249] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4fd15f09-6c24-456e-a5c1-7a48cb86b5aa tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.775s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.119207] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.119559] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7da4168-41ba-4624-9717-2e2b2fe582d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.127303] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1051.127303] env[61974]: value = "task-1379415" [ 1051.127303] env[61974]: _type = "Task" [ 1051.127303] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.136819] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379415, 'name': PowerOffVM_Task} progress is 0%. 
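Annotation: the PowerOffVM_Task invocation and the "Waiting for the task ... / progress is 0%" entries above are the standard oslo.vmware pattern: invoke an asynchronous vSphere method, then poll the returned task until it completes. A minimal sketch, assuming a placeholder vCenter host, credentials and managed-object reference value:

# Hedged sketch: power off a VM and poll the resulting vSphere task with
# the public oslo.vmware API. Host, credentials and the moref value are
# placeholders, not values from this deployment.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.com', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for the VM (hypothetical value).
vm_ref = vim_util.get_moref('vm-123456', 'VirtualMachine')

# Invoke the asynchronous power-off; wait_for_task then polls the task
# object, which is what produces the "progress is N%" lines in the log.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)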
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.336236] env[61974]: DEBUG nova.objects.instance [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'numa_topology' on Instance uuid 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.357089] env[61974]: DEBUG oslo_concurrency.lockutils [None req-31f97859-6ee7-4f91-9479-16e471f25ba7 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1a04b388-8739-4b46-a8e1-cd79835bcf48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.653s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.412376] env[61974]: DEBUG nova.compute.manager [req-efda1055-2fa0-40f3-925c-8b6effaf41d4 req-4bfa76e6-d4a5-4669-bcae-5bafa41cd1fe service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Received event network-vif-deleted-a342d02a-7577-428c-946f-e5725112ceec {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.412376] env[61974]: INFO nova.compute.manager [req-efda1055-2fa0-40f3-925c-8b6effaf41d4 req-4bfa76e6-d4a5-4669-bcae-5bafa41cd1fe service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Neutron deleted interface a342d02a-7577-428c-946f-e5725112ceec; detaching it from the instance and deleting it from the info cache [ 1051.412376] env[61974]: DEBUG nova.network.neutron [req-efda1055-2fa0-40f3-925c-8b6effaf41d4 req-4bfa76e6-d4a5-4669-bcae-5bafa41cd1fe service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.638423] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379415, 'name': PowerOffVM_Task, 'duration_secs': 0.188796} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.638423] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.638423] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 17 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.837847] env[61974]: DEBUG nova.network.neutron [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.839494] env[61974]: DEBUG nova.objects.base [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Object Instance<1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb> lazy-loaded attributes: resources,numa_topology {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1051.846082] env[61974]: DEBUG nova.compute.manager [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1051.846997] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93801bb1-3dc6-4d71-8142-ef4614799c1a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.915549] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca324756-490f-45c1-9499-ba5ffe96292d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.927760] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4542aaf7-9d61-42a3-b8ef-b0c6e6e7e108 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.956011] env[61974]: DEBUG nova.compute.manager [req-efda1055-2fa0-40f3-925c-8b6effaf41d4 req-4bfa76e6-d4a5-4669-bcae-5bafa41cd1fe service nova] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Detach interface failed, port_id=a342d02a-7577-428c-946f-e5725112ceec, reason: Instance ceb0dd02-6441-4923-99f6-73f8eab86fe5 could not be found. 
{{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1051.992206] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acd7909-7c9f-41c7-b5e6-5c5fbf45f740 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.999835] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c3e445-18b2-49d3-b080-b4c094b749f0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.031517] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed34e08a-0dd6-42d0-af98-8bd2ad3bd3a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.039170] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fe5dbc-b178-4454-ae50-51a00cbe05fe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.053038] env[61974]: DEBUG nova.compute.provider_tree [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.143738] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1052.144052] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.144165] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.144351] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.144502] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d 
tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.144661] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1052.144866] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1052.145039] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1052.145213] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1052.145381] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1052.145563] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1052.150538] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ed43570-c5da-4d56-a82f-45deb07783ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.171191] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1052.171191] env[61974]: value = "task-1379416" [ 1052.171191] env[61974]: _type = "Task" [ 1052.171191] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.181772] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379416, 'name': ReconfigVM_Task} progress is 6%. 
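Annotation: the nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate every sockets*cores*threads factorization of the flavor's vCPU count that fits within the flavor/image limits (65536 each here, i.e. effectively unlimited). A simplified, stand-alone illustration of that arithmetic, not Nova's implementation:

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield sockets, cores, threads


# For the 1-vCPU flavors in this run (m1.micro, m1.nano) with the default
# 65536 limits, only one topology is possible, matching the log.
print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]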
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.223116] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.223432] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.343444] env[61974]: INFO nova.compute.manager [-] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Took 1.33 seconds to deallocate network for instance. [ 1052.358693] env[61974]: INFO nova.compute.manager [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] instance snapshotting [ 1052.361752] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e273b3-f976-44b6-a17b-3ffeac867c64 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.382846] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632493b6-afb4-415f-92b3-ba0804210931 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.556858] env[61974]: DEBUG nova.scheduler.client.report [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.682160] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379416, 'name': ReconfigVM_Task, 'duration_secs': 0.355749} completed successfully. 
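Annotation: the "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" entries above come from oslo_concurrency.lockutils, which Nova uses to serialise work such as resource-tracker updates and the detach_volume path seen here. A minimal sketch of that decorator pattern, with an illustrative lock name and function body:

# Hedged sketch of the locking pattern behind the lockutils log lines above.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('compute_resources')
def update_usage():
    # Only one thread in this process may touch the resource tracker's
    # bookkeeping at a time; lockutils logs the wait and hold durations.
    return 'usage updated'


print(update_usage())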
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.682492] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 33 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1052.726218] env[61974]: INFO nova.compute.manager [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Detaching volume 8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2 [ 1052.767302] env[61974]: INFO nova.virt.block_device [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Attempting to driver detach volume 8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2 from mountpoint /dev/sdb [ 1052.767593] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Volume detach. Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1052.767830] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292996', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'name': 'volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c1404fd-a954-4849-883b-7898a7e87e2b', 'attached_at': '', 'detached_at': '', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'serial': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1052.768832] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80b52a7-e4ea-48d5-83f2-3fd539f1b53b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.793286] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbb4db4-9bdc-4007-9468-9565bc513628 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.801380] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa440ef-614a-45c9-8e2e-e2b5d6fb301b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.822926] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d658dbe1-5398-4b08-918f-51d8b3c2fc76 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.838499] env[61974]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] The volume has not been displaced from its original location: [datastore2] volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2/volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2.vmdk. No consolidation needed. {{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1052.843714] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1052.844015] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae83a591-0cd1-4b9e-a379-203f11328f81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.856832] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.862846] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1052.862846] env[61974]: value = "task-1379417" [ 1052.862846] env[61974]: _type = "Task" [ 1052.862846] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.870860] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.895137] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Creating Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1052.895468] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bda1d82e-4bbe-4ee6-a8c4-a1d4c4a72d03 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.903635] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1052.903635] env[61974]: value = "task-1379418" [ 1052.903635] env[61974]: _type = "Task" [ 1052.903635] env[61974]: } to complete. 
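Annotation: the "Reconfiguring VM instance instance-00000043 to detach disk 2001" step above sends ReconfigVM_Task a config spec whose single device change removes the volume's virtual disk. A hedged sketch of that step; session, vm_ref and disk_device are assumed inputs (a VMwareAPISession like the earlier sketch, the VM moref, and the VirtualDisk device found on the VM):

def detach_volume_vmdk(session, vm_ref, disk_device):
    """Send a ReconfigVM_Task that removes one virtual disk from a VM."""
    client_factory = session.vim.client.factory

    device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'
    device_change.device = disk_device  # the VirtualDisk backed by the volume
    # fileOperation is left unset so the underlying VMDK file is kept;
    # only the attachment to this VM is removed.

    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)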
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.912323] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379418, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.062565] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.065296] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.678s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.066746] env[61974]: INFO nova.compute.claims [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.188631] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.188920] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.189094] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.189291] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.189467] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d 
tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.189722] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.189968] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.190162] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.190332] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.190526] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.190728] env[61974]: DEBUG nova.virt.hardware [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.196074] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.196374] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cc1d58e-f138-4b8d-b6ac-9bf31fd16abf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.216095] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1053.216095] env[61974]: value = "task-1379419" [ 1053.216095] env[61974]: _type = "Task" [ 1053.216095] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.229042] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379419, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.373922] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379417, 'name': ReconfigVM_Task, 'duration_secs': 0.237091} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.374207] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1053.378894] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84e248e1-e4e0-4f59-b1ea-eb88515dae24 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.394707] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1053.394707] env[61974]: value = "task-1379420" [ 1053.394707] env[61974]: _type = "Task" [ 1053.394707] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.406151] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379420, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.415646] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379418, 'name': CreateSnapshot_Task, 'duration_secs': 0.478779} completed successfully. 
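Annotation: the CreateSnapshot_Task entries above (invoked around 1052.895, completed about 0.48s later) snapshot the guest so an image can be produced from it. A minimal sketch of that call through oslo.vmware; the snapshot name, description and flags are illustrative defaults:

def snapshot_vm(session, vm_ref, name):
    """Hedged sketch of the CreateSnapshot_Task call logged above."""
    # memory=False skips dumping guest RAM; quiesce asks VMware Tools to
    # flush guest filesystems before the snapshot is taken.
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name=name,
                              description='snapshot taken for image upload',
                              memory=False, quiesce=True)
    task_info = session.wait_for_task(task)
    return task_info.result  # moref of the newly created snapshot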
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.415950] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Created Snapshot of the VM instance {{(pid=61974) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1053.416733] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77916a60-7c18-4f81-8067-4b76eaf4e29f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.575673] env[61974]: DEBUG oslo_concurrency.lockutils [None req-05dc29cb-a910-4f8e-a5d6-f26715d0fda2 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.157s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.576879] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.709s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.576879] env[61974]: INFO nova.compute.manager [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Unshelving [ 1053.726444] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379419, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.904822] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379420, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.942941] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Creating linked-clone VM from snapshot {{(pid=61974) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1053.943353] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cb084668-4cbe-4ed5-89e8-d4f45e45a396 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.953282] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1053.953282] env[61974]: value = "task-1379421" [ 1053.953282] env[61974]: _type = "Task" [ 1053.953282] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.962055] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379421, 'name': CloneVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.227451] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379419, 'name': ReconfigVM_Task} progress is 99%. 
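Annotation: the "Creating linked-clone VM from snapshot" / CloneVM_Task entries above build a clone whose disks are child backings of the snapshot rather than full copies, which is why the clone completes in about a second here. A hedged sketch of such a clone spec; session, vm_ref, snapshot_ref, folder_ref and the clone name are assumed inputs:

def create_linked_clone(session, vm_ref, snapshot_ref, folder_ref, name):
    """Hedged sketch of a linked-clone CloneVM_Task."""
    client_factory = session.vim.client.factory

    # Relocate spec: child disks are created on top of the snapshot's
    # backing files instead of copying them, making the clone "linked".
    rel_spec = client_factory.create('ns0:VirtualMachineRelocateSpec')
    rel_spec.diskMoveType = 'createNewChildDiskBacking'

    clone_spec = client_factory.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = rel_spec
    clone_spec.powerOn = False
    clone_spec.template = False
    clone_spec.snapshot = snapshot_ref

    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=name, spec=clone_spec)
    task_info = session.wait_for_task(task)
    return task_info.result  # moref of the new clone VM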
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.229148] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c7d8f4-bda9-497d-a417-6d5232aa289a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.236479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542cccf4-4ffe-40aa-9620-e3ba76863325 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.269612] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61360c68-96c8-4f7e-a30c-29d16d19c217 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.277679] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d0e3a6-34ed-4f73-8cb1-794abc235da4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.291258] env[61974]: DEBUG nova.compute.provider_tree [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.406585] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379420, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.463829] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379421, 'name': CloneVM_Task} progress is 94%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.600855] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.728782] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379419, 'name': ReconfigVM_Task, 'duration_secs': 1.173182} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.729176] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1054.730163] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15674e45-dae7-46f7-b7ef-a852bd86be2f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.754674] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.755014] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3e284ef-cba5-4e5e-ad63-284189b6a9f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.774970] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1054.774970] env[61974]: value = "task-1379422" [ 1054.774970] env[61974]: _type = "Task" [ 1054.774970] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.783632] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379422, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.794990] env[61974]: DEBUG nova.scheduler.client.report [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.905842] env[61974]: DEBUG oslo_vmware.api [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379420, 'name': ReconfigVM_Task, 'duration_secs': 1.074135} completed successfully. 
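Annotation: the repeated "Inventory has not changed for provider ... based on inventory data" entries carry exactly what the resource tracker reports to Placement. With those figures, effective schedulable capacity is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. A small worked example using the values from the log:

# Worked example from the inventory dict logged above (not Nova code).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0,
             'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0,
                'max_unit': 177},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, "
          f"largest single allocation {inv['max_unit']}")
# VCPU: schedulable capacity 192, largest single allocation 16
# MEMORY_MB: schedulable capacity 196078, largest single allocation 65530
# DISK_GB: schedulable capacity 400, largest single allocation 177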
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.906442] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-292996', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'name': 'volume-8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c1404fd-a954-4849-883b-7898a7e87e2b', 'attached_at': '', 'detached_at': '', 'volume_id': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2', 'serial': '8740b394-2a0a-4c0f-b9e9-f8e3fbd3cfa2'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1054.966063] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379421, 'name': CloneVM_Task} progress is 95%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.285139] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379422, 'name': ReconfigVM_Task, 'duration_secs': 0.256834} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.285607] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Reconfigured VM instance instance-0000005e to attach disk [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63/bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.285765] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 50 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.300913] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.301400] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1055.303650] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.647s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.305111] env[61974]: INFO nova.compute.claims [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.452321] env[61974]: DEBUG nova.objects.instance [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid 1c1404fd-a954-4849-883b-7898a7e87e2b {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.465455] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379421, 'name': CloneVM_Task, 'duration_secs': 1.222884} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.466343] env[61974]: INFO nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Created linked-clone VM from snapshot [ 1055.467084] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58343d28-e69f-449f-8fa7-a1bfee3cc2db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.476575] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Uploading image bf826d5a-0c21-4c2a-ad0e-560df10b7ff9 {{(pid=61974) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1055.489065] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Destroying the VM {{(pid=61974) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1055.489544] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3e3a55a1-427a-4626-9e46-c9999c6376a3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.496714] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1055.496714] env[61974]: value = "task-1379423" [ 1055.496714] env[61974]: _type = "Task" [ 1055.496714] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.504650] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379423, 'name': Destroy_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.792650] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe0c557-eae2-4499-a26a-c2072ddfaf99 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.813836] env[61974]: DEBUG nova.compute.utils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1055.817586] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1055.817706] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.820013] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a284b6-2a9e-4d1e-a943-ff797e954977 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.842057] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 67 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.887788] env[61974]: DEBUG nova.policy [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1056.008209] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379423, 'name': Destroy_Task} progress is 33%. 
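Annotation: the "Policy check for network:attach_external_network failed with credentials ..." entry above is an oslo.policy authorization of the request context; for a non-admin caller with only reader/member roles the check evaluates to False and Nova simply skips attaching external networks. A minimal sketch, assuming an admin-only default for the rule (consistent with the failed check in the log, but the exact default string is an assumption):

# Hedged sketch of the policy check; rule default and credential dict are
# abbreviated / assumed, not copied from Nova's policy definitions.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'is_admin:True'))

credentials = {'is_admin': False, 'roles': ['reader', 'member'],
               'project_id': '4757d24b61794cfcaefff2ad44e02b74'}
target = {'project_id': credentials['project_id']}

# False for these member-role credentials, which Nova logs as a failed
# policy check and then continues without external networks.
print(enforcer.enforce('network:attach_external_network', target, credentials))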
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.176528] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Successfully created port: ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.318390] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1056.396621] env[61974]: DEBUG nova.network.neutron [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Port 62349265-e925-44c7-8158-8bfcb7fc0478 binding to destination host cpu-1 is already ACTIVE {{(pid=61974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1056.458677] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3f63fe59-3728-4a3b-9a69-f631135ff8fe tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.235s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.464576] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b5a5ff-b271-4e60-abcd-4d0f2a7939fe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.473116] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e754f57-5a6c-4e31-a869-04ac9ac7a780 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.510446] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569d8c77-8348-4457-a2f8-5af837e01e7a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.520595] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379423, 'name': Destroy_Task, 'duration_secs': 0.553023} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.521845] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a882800-b488-4012-9ffb-9995f3515879 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.525499] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Destroyed the VM [ 1056.525747] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Deleting Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1056.525987] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5b0b4fcb-176a-4cef-bcb7-897f6098aee3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.538802] env[61974]: DEBUG nova.compute.provider_tree [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.542470] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1056.542470] env[61974]: value = "task-1379424" [ 1056.542470] env[61974]: _type = "Task" [ 1056.542470] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.551497] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379424, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.041804] env[61974]: DEBUG nova.scheduler.client.report [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1057.055955] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379424, 'name': RemoveSnapshot_Task} progress is 100%. 
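The vCenter task entries above (CloneVM_Task, Destroy_Task, RemoveSnapshot_Task) all follow the same shape: a *_Task method is invoked, the API layer waits for the task, logging "progress is N%" on each poll, and finally reports it "completed successfully" with a duration. Below is a minimal, self-contained sketch of that poll loop; SimulatedTask and its info() method are hypothetical stand-ins for reading vSphere TaskInfo (which the real driver does through the PropertyCollector calls seen in these entries), not the oslo.vmware implementation itself.

import time

class SimulatedTask:
    """Hypothetical stand-in for a vSphere task handle; the real driver
    reads TaskInfo via PropertyCollector, as the surrounding log shows."""

    def __init__(self, steps=(0, 33, 100)):
        self._steps = list(steps)

    def info(self):
        progress = self._steps.pop(0) if self._steps else 100
        state = 'success' if progress >= 100 else 'running'
        return {'state': state, 'progress': progress}


def wait_for_task(task, poll_interval=0.5):
    # Same shape as the log: wait for the task, report "progress is N%"
    # on each poll, and stop once the task reports success.
    while True:
        info = task.info()
        if info['state'] == 'success':
            print('task completed successfully')
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed')
        print('task progress is %d%%' % info['progress'])
        time.sleep(poll_interval)


wait_for_task(SimulatedTask())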
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.332348] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1057.359233] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1057.359502] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.359700] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1057.359893] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.360063] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1057.360222] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1057.360431] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1057.360596] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c 
tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1057.360804] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1057.361015] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1057.361213] env[61974]: DEBUG nova.virt.hardware [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1057.362058] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97e6567-d9a0-4b7e-97a3-c63f2719b70b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.370418] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472f2984-0d06-4108-b263-0bd419ba1da6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.417658] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.417873] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.418075] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.509912] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.510076] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.510296] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.510502] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.510710] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.512708] env[61974]: INFO nova.compute.manager [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Terminating instance [ 1057.514428] env[61974]: DEBUG nova.compute.manager [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1057.514626] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.515504] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36efedca-09fc-4023-a3e5-701ee241f942 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.524649] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.524891] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-171fd81f-d9c8-4b32-a2c2-7349bff72893 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.531430] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1057.531430] env[61974]: value = "task-1379425" [ 1057.531430] env[61974]: _type = "Task" [ 1057.531430] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.541034] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379425, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.544795] env[61974]: DEBUG nova.compute.manager [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Received event network-vif-plugged-ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.545044] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] Acquiring lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.545499] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.545499] env[61974]: DEBUG oslo_concurrency.lockutils [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.545639] env[61974]: DEBUG nova.compute.manager [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] No waiting events found dispatching network-vif-plugged-ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1057.545753] env[61974]: WARNING nova.compute.manager [req-ed000370-06ea-4a20-b601-0f8c87fef83a req-feab1dca-57e2-43c6-af4b-d00fa21ad045 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Received unexpected event network-vif-plugged-ff6934b6-3a62-4412-8f5f-06c123c3e260 for instance with vm_state building and task_state spawning. [ 1057.547378] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.548319] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Start building networks asynchronously for instance. 
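The recurring "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines in these entries are emitted by oslo.concurrency's lock wrapper when debug logging is enabled. The sketch below shows how a function ends up serialized on the "compute_resources" lock; claim_resources is a hypothetical name, and the actual resource tracker applies the decorator through Nova's own helpers rather than directly like this.

from oslo_concurrency import lockutils

# Hypothetical claim function; with debug logging enabled, entering and
# leaving the decorated function is what produces the "acquired by ...
# waited Ns" / "released ... held Ns" lines seen above.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Critical section: only one caller at a time touches tracker state.
    return {'instance': instance_uuid, 'claimed': True}


print(claim_resources('8d71aaf0-e35c-4e6e-9094-d55b1544c3c8'))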
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1057.554238] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.164s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.554394] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.554545] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1057.554850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.686s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.555103] env[61974]: DEBUG nova.objects.instance [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'resources' on Instance uuid f475d963-0c09-4115-885a-04e28895df14 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.557718] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f106701d-162d-470a-8ef4-9c380331cc67 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.567372] env[61974]: DEBUG oslo_vmware.api [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379424, 'name': RemoveSnapshot_Task, 'duration_secs': 0.541946} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.571202] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Deleted Snapshot of the VM instance {{(pid=61974) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1057.575054] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae82953f-b949-49ca-89e2-d94385c4116c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.592777] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12da4ce3-d1f8-42bf-b129-a7eaea3534d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.601444] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e958dbf0-52f4-4262-9a3d-4a98569fa70a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.637197] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179731MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1057.637467] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.041431] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379425, 'name': PowerOffVM_Task, 'duration_secs': 0.197241} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.041708] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.041886] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.042157] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b9b0938-3951-463e-8503-e6f649c43ce3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.058320] env[61974]: DEBUG nova.compute.utils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1058.059731] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1058.062051] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.076048] env[61974]: WARNING nova.compute.manager [None req-573abdbd-1da4-4571-9625-91ec6f40fa11 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Image not found during snapshot: nova.exception.ImageNotFound: Image bf826d5a-0c21-4c2a-ad0e-560df10b7ff9 could not be found. 
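The hypervisor resource view above (free_ram=179731MB, free_disk=177GB, free_vcpus=48) sits alongside the placement inventory reported earlier in this trace for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a. Assuming the usual placement capacity formula, (total - reserved) * allocation_ratio, the schedulable capacity implied by that inventory can be reproduced in a few lines:

# Inventory as reported for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Placement-style usable capacity: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}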
[ 1058.114452] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.114611] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.114732] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleting the datastore file [datastore1] 1c1404fd-a954-4849-883b-7898a7e87e2b {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.115074] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-980b3219-675c-455d-8666-ad0991d3af2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.123227] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1058.123227] env[61974]: value = "task-1379427" [ 1058.123227] env[61974]: _type = "Task" [ 1058.123227] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.137797] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379427, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.235447] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753971ca-6cb9-4c38-bee3-3e7382779be7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.243985] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817a966b-08c2-4a9f-be60-4a34ee230d64 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.283222] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ec6fa4-ecce-4999-a201-d63041d44a53 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.291478] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55338f86-98dd-4ffb-91d6-8dd76a8ce153 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.304836] env[61974]: DEBUG nova.compute.provider_tree [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.345373] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Successfully updated port: ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.366044] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.366345] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.366657] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.367023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.367190] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.369756] env[61974]: INFO nova.compute.manager [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Terminating instance [ 1058.372715] env[61974]: DEBUG nova.compute.manager [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1058.372992] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.374138] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db71b36-5614-4c1d-af6e-22cc6f39e2be {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.379652] env[61974]: DEBUG nova.policy [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4896588cebd84071a573046de7006429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db6af28263c40708c2466226ce03009', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1058.387922] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.388450] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da32a46b-70bc-4228-acfd-c684b3994f80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.396098] env[61974]: DEBUG oslo_vmware.api [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1058.396098] env[61974]: value = "task-1379428" [ 1058.396098] env[61974]: _type = "Task" [ 1058.396098] 
env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.408204] env[61974]: DEBUG oslo_vmware.api [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.434132] env[61974]: DEBUG nova.compute.manager [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Received event network-changed-ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1058.434351] env[61974]: DEBUG nova.compute.manager [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Refreshing instance network info cache due to event network-changed-ff6934b6-3a62-4412-8f5f-06c123c3e260. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1058.434570] env[61974]: DEBUG oslo_concurrency.lockutils [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] Acquiring lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.434725] env[61974]: DEBUG oslo_concurrency.lockutils [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] Acquired lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.434888] env[61974]: DEBUG nova.network.neutron [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Refreshing network info cache for port ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.470483] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.470674] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.470854] env[61974]: DEBUG nova.network.neutron [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.564159] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1058.633658] env[61974]: DEBUG oslo_vmware.api [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1389} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.633927] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.634133] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.634321] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.634507] env[61974]: INFO nova.compute.manager [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1058.634756] env[61974]: DEBUG oslo.service.loopingcall [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.634947] env[61974]: DEBUG nova.compute.manager [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1058.635053] env[61974]: DEBUG nova.network.neutron [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.737718] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Successfully created port: e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.808523] env[61974]: DEBUG nova.scheduler.client.report [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1058.848549] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.906356] env[61974]: DEBUG oslo_vmware.api [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379428, 'name': PowerOffVM_Task, 'duration_secs': 0.219602} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.906629] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.906805] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.907083] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15209cd4-e4ca-42b5-88bb-13e999f2afd0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.978126] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.978126] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.978435] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleting the datastore file [datastore1] e3dc39a5-4e90-472d-8b62-fd17572852f7 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.978919] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbcbd4f4-4b14-46f7-9c4c-be9729c0b53b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.985745] env[61974]: DEBUG oslo_vmware.api [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for the task: (returnval){ [ 1058.985745] env[61974]: value = "task-1379430" [ 1058.985745] env[61974]: _type = "Task" [ 1058.985745] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.995956] env[61974]: DEBUG oslo_vmware.api [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.023647] env[61974]: DEBUG nova.network.neutron [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1059.192624] env[61974]: DEBUG nova.network.neutron [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.271037] env[61974]: DEBUG nova.network.neutron [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.314282] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.318778] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.462s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.319157] env[61974]: DEBUG nova.objects.instance [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lazy-loading 'resources' on Instance uuid ceb0dd02-6441-4923-99f6-73f8eab86fe5 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.349894] env[61974]: INFO nova.scheduler.client.report [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance f475d963-0c09-4115-885a-04e28895df14 [ 1059.496746] env[61974]: DEBUG oslo_vmware.api 
[None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Task: {'id': task-1379430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160754} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.497100] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.497202] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.497340] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.497526] env[61974]: INFO nova.compute.manager [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1059.497864] env[61974]: DEBUG oslo.service.loopingcall [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.498079] env[61974]: DEBUG nova.compute.manager [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1059.498177] env[61974]: DEBUG nova.network.neutron [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.582877] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1059.617319] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.617677] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.617792] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.617964] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.618137] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.618291] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.618498] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.618659] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.618828] env[61974]: DEBUG 
nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.619367] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.619367] env[61974]: DEBUG nova.virt.hardware [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.620075] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87b776c-ea11-452e-b2c2-7e8e10bc65ab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.628468] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441206f8-9df6-4494-9a1d-0bbfd5b7ebde {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.695652] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.772478] env[61974]: DEBUG oslo_concurrency.lockutils [req-72e26be4-7a06-4720-b744-9f98473eaca9 req-ab55b9ad-4775-4605-9e6e-a5b68c29d790 service nova] Releasing lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.772831] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.772990] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.856965] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8437a07-152c-45c4-a6b5-f7d2e4c1e0e4 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "f475d963-0c09-4115-885a-04e28895df14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.454s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.958492] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1dfd87c9-ca3e-414e-bc49-78a38c284e62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.966907] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf8ce9e-d8c9-46d0-9d8c-333e93a66931 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.998868] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821e941f-6a05-4347-98fe-4f340011b0bf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.006507] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdfaa06-b565-4ee1-8070-8b43f3a9ed8c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.021359] env[61974]: DEBUG nova.compute.provider_tree [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.050615] env[61974]: DEBUG nova.network.neutron [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.216601] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f785fa-87e6-4bd2-b473-c6365996df04 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.236531] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7b2be2-5f22-403a-95a7-5da8db04a358 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.244178] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 83 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.262345] env[61974]: DEBUG nova.network.neutron [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.306890] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1060.469223] env[61974]: DEBUG nova.compute.manager [req-a9927ad4-50d1-43ce-b61f-b008d47bf8de req-033e6f10-1eb6-4080-8097-92674d8071f7 service nova] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Received event network-vif-deleted-f76d592c-5eee-4379-b971-9896eb2bb538 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1060.469522] env[61974]: DEBUG nova.compute.manager [req-a9927ad4-50d1-43ce-b61f-b008d47bf8de req-033e6f10-1eb6-4080-8097-92674d8071f7 service nova] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Received event network-vif-deleted-3ccab2a0-7919-4a4a-953b-0abac563e24a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1060.524407] env[61974]: DEBUG nova.scheduler.client.report [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1060.553149] env[61974]: INFO nova.compute.manager [-] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Took 1.92 seconds to deallocate network for instance. [ 1060.565927] env[61974]: DEBUG nova.network.neutron [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Updating instance_info_cache with network_info: [{"id": "ff6934b6-3a62-4412-8f5f-06c123c3e260", "address": "fa:16:3e:ce:53:f8", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6934b6-3a", "ovs_interfaceid": "ff6934b6-3a62-4412-8f5f-06c123c3e260", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.750517] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powering on the VM 
{{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.750894] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebfac221-75bc-48d6-ad47-de23adbe6142 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.760009] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1060.760009] env[61974]: value = "task-1379431" [ 1060.760009] env[61974]: _type = "Task" [ 1060.760009] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.764886] env[61974]: INFO nova.compute.manager [-] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Took 1.27 seconds to deallocate network for instance. [ 1060.773031] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379431, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.785038] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Successfully updated port: e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.883456] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "584ce365-9125-4c2a-9668-f921beb599e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.883710] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.033191] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.036026] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.435s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.036026] 
env[61974]: DEBUG nova.objects.instance [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'pci_requests' on Instance uuid 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.056781] env[61974]: INFO nova.scheduler.client.report [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Deleted allocations for instance ceb0dd02-6441-4923-99f6-73f8eab86fe5 [ 1061.062873] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.071474] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-ca8a238c-4b52-4016-8614-c2f8ad7891f7" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.071838] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance network_info: |[{"id": "ff6934b6-3a62-4412-8f5f-06c123c3e260", "address": "fa:16:3e:ce:53:f8", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6934b6-3a", "ovs_interfaceid": "ff6934b6-3a62-4412-8f5f-06c123c3e260", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1061.072309] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:53:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff6934b6-3a62-4412-8f5f-06c123c3e260', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.080139] env[61974]: DEBUG oslo.service.loopingcall [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1061.080713] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.080957] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e531c335-12b6-4f5b-9150-a8a25e154df4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.102574] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.102574] env[61974]: value = "task-1379432" [ 1061.102574] env[61974]: _type = "Task" [ 1061.102574] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.111817] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379432, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.270628] env[61974]: DEBUG oslo_vmware.api [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379431, 'name': PowerOnVM_Task, 'duration_secs': 0.473037} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.270931] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.271141] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a889195-b296-4e92-abc8-9dc551bfdb1d tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance 'bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63' progress to 100 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1061.275146] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.287448] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.287448] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.287448] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.386459] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1061.540851] env[61974]: DEBUG nova.objects.instance [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'numa_topology' on Instance uuid 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.564991] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1a7dbf50-05d2-4ea1-a38c-7858ec3401c7 tempest-AttachInterfacesTestJSON-1448499945 tempest-AttachInterfacesTestJSON-1448499945-project-member] Lock "ceb0dd02-6441-4923-99f6-73f8eab86fe5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.675s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.612854] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379432, 'name': CreateVM_Task, 'duration_secs': 0.365602} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.613039] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.613752] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.613924] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.614263] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1061.614514] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a9a3d12-6ccb-4c6c-ab30-acfdf5bdecde {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.619084] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1061.619084] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52696e07-a605-d7e7-1c44-d87d9438cbd4" [ 1061.619084] env[61974]: _type = "Task" [ 1061.619084] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.627509] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52696e07-a605-d7e7-1c44-d87d9438cbd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.822107] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.910604] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.968812] env[61974]: DEBUG nova.network.neutron [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating instance_info_cache with network_info: [{"id": "e43abc26-b25e-444f-9857-3967570e294a", "address": "fa:16:3e:00:ac:ab", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape43abc26-b2", "ovs_interfaceid": "e43abc26-b25e-444f-9857-3967570e294a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.043117] env[61974]: INFO nova.compute.claims [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.131238] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52696e07-a605-d7e7-1c44-d87d9438cbd4, 'name': SearchDatastore_Task, 'duration_secs': 0.010513} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.131612] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.131820] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.132110] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.132232] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.132425] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.132709] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b5f7846-1df2-4521-86b8-db9c707ec9e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.142860] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.143063] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.143799] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45807af2-4ae7-4826-a87a-36a34c21147e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.149507] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1062.149507] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529bca8a-88a5-af5a-85c9-3b8fc3cb385e" [ 1062.149507] env[61974]: _type = "Task" [ 1062.149507] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.157925] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529bca8a-88a5-af5a-85c9-3b8fc3cb385e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.472338] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.472618] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance network_info: |[{"id": "e43abc26-b25e-444f-9857-3967570e294a", "address": "fa:16:3e:00:ac:ab", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape43abc26-b2", "ovs_interfaceid": "e43abc26-b25e-444f-9857-3967570e294a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1062.473108] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:00:ac:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e43abc26-b25e-444f-9857-3967570e294a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.480754] env[61974]: DEBUG oslo.service.loopingcall [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.480999] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.481243] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1207e8a7-a386-49fe-98af-8d069047fa65 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.498579] env[61974]: DEBUG nova.compute.manager [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Received event network-vif-plugged-e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.498882] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.499123] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.499306] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.499483] env[61974]: DEBUG nova.compute.manager [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] No waiting events found dispatching network-vif-plugged-e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1062.499714] env[61974]: WARNING nova.compute.manager [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Received unexpected event network-vif-plugged-e43abc26-b25e-444f-9857-3967570e294a for instance with vm_state building and task_state spawning. 
[ 1062.499895] env[61974]: DEBUG nova.compute.manager [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Received event network-changed-e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.500104] env[61974]: DEBUG nova.compute.manager [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Refreshing instance network info cache due to event network-changed-e43abc26-b25e-444f-9857-3967570e294a. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1062.500267] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Acquiring lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.500442] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Acquired lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.501166] env[61974]: DEBUG nova.network.neutron [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Refreshing network info cache for port e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.511701] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.511701] env[61974]: value = "task-1379433" [ 1062.511701] env[61974]: _type = "Task" [ 1062.511701] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.520891] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379433, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.666536] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529bca8a-88a5-af5a-85c9-3b8fc3cb385e, 'name': SearchDatastore_Task, 'duration_secs': 0.009606} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.667337] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee2bb91-a705-49f7-926e-e74a78175e89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.674133] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1062.674133] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5269bcfd-4782-0375-0cf8-96c3cf51f5ed" [ 1062.674133] env[61974]: _type = "Task" [ 1062.674133] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.683649] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5269bcfd-4782-0375-0cf8-96c3cf51f5ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.022627] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379433, 'name': CreateVM_Task, 'duration_secs': 0.320762} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.022799] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.023650] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.023824] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.024160] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.024413] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-252a811e-da75-4422-a777-bcc4c1904839 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.029694] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1063.029694] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522df784-e2d3-1fd9-6b05-6fba1904647f" [ 1063.029694] env[61974]: _type = "Task" [ 1063.029694] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.039702] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522df784-e2d3-1fd9-6b05-6fba1904647f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.190148] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5269bcfd-4782-0375-0cf8-96c3cf51f5ed, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.194082] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.194082] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] ca8a238c-4b52-4016-8614-c2f8ad7891f7/ca8a238c-4b52-4016-8614-c2f8ad7891f7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.194082] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31420457-e7a0-426a-aafd-8fbf06f35741 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.204156] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1063.204156] env[61974]: value = "task-1379434" [ 1063.204156] env[61974]: _type = "Task" [ 1063.204156] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.206172] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ff27e1-d69e-4e61-bf11-33c229e54a43 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.219894] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379434, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.221661] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5fbd6e-1ff5-4b89-ae4d-fc527d4d1cdc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.254566] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d8f1d5-f8d0-44c1-ad56-4b2de2d57a35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.263603] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774a2c2d-0ae2-40eb-b492-3b281486cea8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.278688] env[61974]: DEBUG nova.compute.provider_tree [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.294073] env[61974]: DEBUG nova.network.neutron [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updated VIF entry in instance network info cache for port e43abc26-b25e-444f-9857-3967570e294a. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.294452] env[61974]: DEBUG nova.network.neutron [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating instance_info_cache with network_info: [{"id": "e43abc26-b25e-444f-9857-3967570e294a", "address": "fa:16:3e:00:ac:ab", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape43abc26-b2", "ovs_interfaceid": "e43abc26-b25e-444f-9857-3967570e294a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.480935] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" 
{{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.481261] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.481463] env[61974]: DEBUG nova.compute.manager [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Going to confirm migration 2 {{(pid=61974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1063.541503] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522df784-e2d3-1fd9-6b05-6fba1904647f, 'name': SearchDatastore_Task, 'duration_secs': 0.009628} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.542289] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.542289] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.542525] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.542525] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.542702] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.543474] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-bb28e7c2-329f-4703-929a-a0de18ba2fe2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.559636] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.559636] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.560365] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ca9fb3b-24ce-46a8-b1b9-9411928e303f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.567040] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1063.567040] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]524c6a91-7273-3fe5-c395-da93b70b2cc5" [ 1063.567040] env[61974]: _type = "Task" [ 1063.567040] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.578858] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524c6a91-7273-3fe5-c395-da93b70b2cc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.498470] env[61974]: DEBUG nova.scheduler.client.report [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.502332] env[61974]: DEBUG oslo_concurrency.lockutils [req-0917c7f3-256e-4515-aaab-32fa5015ca9d req-a62f5e1a-ce53-48c5-add9-95d48fd4ce52 service nova] Releasing lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.504995] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379434, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.505219] env[61974]: WARNING oslo_vmware.common.loopingcall [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] task run outlasted interval by 0.299465 sec [ 1064.523913] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574917} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.524173] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]524c6a91-7273-3fe5-c395-da93b70b2cc5, 'name': SearchDatastore_Task, 'duration_secs': 0.066203} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.524400] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] ca8a238c-4b52-4016-8614-c2f8ad7891f7/ca8a238c-4b52-4016-8614-c2f8ad7891f7.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.524636] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.526169] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-647eb0bf-b14f-4eb4-b57f-35908050d29c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.528878] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-855d0c7b-911c-4b43-8eee-02c85ecd9912 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.534547] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1064.534547] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b28446-9ea7-5b30-9336-cee268436dc6" [ 1064.534547] env[61974]: _type = "Task" [ 1064.534547] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.538877] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1064.538877] env[61974]: value = "task-1379435" [ 1064.538877] env[61974]: _type = "Task" [ 1064.538877] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.545445] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b28446-9ea7-5b30-9336-cee268436dc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.550856] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.557043] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.557234] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.557417] env[61974]: DEBUG nova.network.neutron [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.557660] env[61974]: DEBUG nova.objects.instance [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'info_cache' on Instance uuid bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.008643] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.973s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.010774] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.373s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.046130] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b28446-9ea7-5b30-9336-cee268436dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.011197} completed 
successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.046935] env[61974]: INFO nova.network.neutron [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating port e377f334-8d36-4f17-8532-abbd37c47eba with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1065.048867] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.049147] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1065.049400] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10bec8bf-1bca-4a9d-9ee6-976b6487b79e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.054226] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075748} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.055772] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.056493] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8fb797-e077-472a-aa60-fd09d980bdde {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.062269] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1065.062269] env[61974]: value = "task-1379436" [ 1065.062269] env[61974]: _type = "Task" [ 1065.062269] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.081763] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] ca8a238c-4b52-4016-8614-c2f8ad7891f7/ca8a238c-4b52-4016-8614-c2f8ad7891f7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.085705] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1731a0e0-006d-421a-a721-9125c99a2349 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.108233] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.109786] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1065.109786] env[61974]: value = "task-1379437" [ 1065.109786] env[61974]: _type = "Task" [ 1065.109786] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.119164] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.582325] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515673} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.582654] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.582799] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.583067] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c1ecab8-9cf8-4664-b950-371717855623 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.591095] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1065.591095] env[61974]: value = "task-1379438" [ 1065.591095] env[61974]: _type = "Task" [ 1065.591095] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.603108] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379438, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.622806] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379437, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.775160] env[61974]: DEBUG nova.network.neutron [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [{"id": "62349265-e925-44c7-8158-8bfcb7fc0478", "address": "fa:16:3e:6b:58:30", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62349265-e9", "ovs_interfaceid": "62349265-e925-44c7-8158-8bfcb7fc0478", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.021367] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Applying migration context for instance bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 as it has an incoming, in-progress migration 897477ea-f863-4278-ac49-cbd1a2013020. Migration status is confirming {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1066.022507] env[61974]: INFO nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating resource usage from migration 897477ea-f863-4278-ac49-cbd1a2013020 [ 1066.041754] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1c1404fd-a954-4849-883b-7898a7e87e2b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1066.041923] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance b1fa5433-8f26-48db-a19d-d1e11245fb44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.042144] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.042287] env[61974]: WARNING nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e3dc39a5-4e90-472d-8b62-fd17572852f7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1066.042407] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Migration 897477ea-f863-4278-ac49-cbd1a2013020 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1066.042522] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.042635] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance ca8a238c-4b52-4016-8614-c2f8ad7891f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.042753] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.042923] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1066.100822] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379438, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070862} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.101183] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1066.101939] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73fed12-bb65-4770-b946-0dd3db49e0a3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.123807] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.126832] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f55f2f3-1707-401b-9799-41f462d39917 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.144757] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379437, 'name': ReconfigVM_Task, 'duration_secs': 0.586251} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.145767] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Reconfigured VM instance instance-00000061 to attach disk [datastore2] ca8a238c-4b52-4016-8614-c2f8ad7891f7/ca8a238c-4b52-4016-8614-c2f8ad7891f7.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.146397] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1066.146397] env[61974]: value = "task-1379439" [ 1066.146397] env[61974]: _type = "Task" [ 1066.146397] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.146579] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1364a97-e21f-4d7b-9ca3-70d900888db8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.155629] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379439, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.156729] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1066.156729] env[61974]: value = "task-1379440" [ 1066.156729] env[61974]: _type = "Task" [ 1066.156729] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.163249] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379440, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.277751] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.278075] env[61974]: DEBUG nova.objects.instance [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'migration_context' on Instance uuid bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.545856] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 584ce365-9125-4c2a-9668-f921beb599e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1066.546192] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1066.546402] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1066.570327] env[61974]: DEBUG nova.compute.manager [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1066.570518] env[61974]: DEBUG oslo_concurrency.lockutils [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.570759] env[61974]: DEBUG oslo_concurrency.lockutils [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.570950] env[61974]: DEBUG oslo_concurrency.lockutils [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.574443] env[61974]: DEBUG nova.compute.manager [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] No waiting events found dispatching network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1066.574653] env[61974]: WARNING nova.compute.manager [req-ad293310-9876-443c-99e8-d18015199d51 req-6787e6d4-d11f-4185-b1e5-bc6f1a9f7ffa service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received unexpected event network-vif-plugged-e377f334-8d36-4f17-8532-abbd37c47eba for instance with vm_state shelved_offloaded and task_state spawning. [ 1066.659933] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379439, 'name': ReconfigVM_Task, 'duration_secs': 0.281315} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.663183] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.663888] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2f024e9-4078-427f-aaeb-f7c8b8d8c447 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.670488] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379440, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.673981] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1066.673981] env[61974]: value = "task-1379441" [ 1066.673981] env[61974]: _type = "Task" [ 1066.673981] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.677842] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.678040] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.678230] env[61974]: DEBUG nova.network.neutron [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.683663] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379441, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.773629] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd803bc-447d-49dd-8068-bb12a3c7c82a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.781800] env[61974]: DEBUG nova.objects.base [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1066.782904] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cd4394-90eb-4ad4-ad13-7bbb00e04e2c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.786529] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55383df6-1982-4252-80c5-50eaf3eb4a88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.828831] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99494d96-0c33-4b50-939e-a77197c8fc91 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.832502] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b7e44d-2be6-417f-a753-ad59908e1344 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.840682] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e44e02-4232-47d6-b37f-9b2a68ef6427 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.844739] env[61974]: DEBUG oslo_vmware.api [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1066.844739] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52002723-8ab4-6e43-abce-21c7541229de" [ 1066.844739] env[61974]: _type = "Task" [ 1066.844739] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.855360] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.861335] env[61974]: DEBUG oslo_vmware.api [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52002723-8ab4-6e43-abce-21c7541229de, 'name': SearchDatastore_Task, 'duration_secs': 0.006631} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.861585] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.171016] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379440, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.182745] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379441, 'name': Rename_Task, 'duration_secs': 0.138486} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.183030] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.183275] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f754ffac-17b1-4268-afba-8c47c6ed849c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.189684] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1067.189684] env[61974]: value = "task-1379442" [ 1067.189684] env[61974]: _type = "Task" [ 1067.189684] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.196746] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.358350] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1067.421649] env[61974]: DEBUG nova.network.neutron [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.669986] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379440, 'name': Rename_Task, 'duration_secs': 1.145344} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.670351] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1067.670554] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-518e4144-3ff0-4bf0-93f6-4c840a6197cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.676547] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1067.676547] env[61974]: value = "task-1379443" [ 1067.676547] env[61974]: _type = "Task" [ 1067.676547] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.684206] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.699031] env[61974]: DEBUG oslo_vmware.api [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379442, 'name': PowerOnVM_Task, 'duration_secs': 0.423903} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.699283] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.699500] env[61974]: INFO nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Took 8.12 seconds to spawn the instance on the hypervisor. 
[ 1067.699744] env[61974]: DEBUG nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1067.700557] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c738b5f-ecee-4299-84f6-dec49c35563d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.867528] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1067.867932] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.857s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.868240] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.806s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.868441] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.870501] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.595s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.870707] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.872814] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.963s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.874482] env[61974]: INFO nova.compute.claims [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1067.898129] env[61974]: INFO nova.scheduler.client.report [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Deleted allocations for instance e3dc39a5-4e90-472d-8b62-fd17572852f7 [ 1067.900698] env[61974]: INFO nova.scheduler.client.report [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted allocations for instance 1c1404fd-a954-4849-883b-7898a7e87e2b [ 1067.925440] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.954034] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='49d790949ba0bd5e20a94a4b761638a9',container_format='bare',created_at=2024-10-29T21:01:41Z,direct_url=,disk_format='vmdk',id=7e8de138-c44e-4516-9083-c48e99a4114f,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1213937782-shelved',owner='fb6e7e7e52fc4aacaf5054732cd7d2df',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-10-29T21:01:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1067.954341] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1067.954535] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1067.954751] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1067.954930] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1067.955123] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 
tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1067.955351] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1067.955539] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1067.955725] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1067.955921] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1067.956128] env[61974]: DEBUG nova.virt.hardware [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1067.958094] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d504b63a-355e-4b98-8917-2d4921c89939 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.966693] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a6278f-85a8-469f-a6b8-39a4ebedb39f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.980299] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:63:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ccbc7a-cf8d-4ea2-8411-291a1e27df7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e377f334-8d36-4f17-8532-abbd37c47eba', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.987667] env[61974]: DEBUG oslo.service.loopingcall [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.987927] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.988189] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0270869-34a9-495f-be60-0aacc1f467dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.007286] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.007286] env[61974]: value = "task-1379444" [ 1068.007286] env[61974]: _type = "Task" [ 1068.007286] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.016152] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379444, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.186754] env[61974]: DEBUG oslo_vmware.api [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379443, 'name': PowerOnVM_Task, 'duration_secs': 0.445858} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.187048] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1068.187299] env[61974]: INFO nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Took 10.85 seconds to spawn the instance on the hypervisor. [ 1068.187507] env[61974]: DEBUG nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1068.188321] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a398aa21-98c1-4615-9319-f8be79abcbda {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.219957] env[61974]: INFO nova.compute.manager [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Took 20.58 seconds to build instance. 
[ 1068.412255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9f964f87-d144-4a93-ae7e-9883febaaf53 tempest-ImagesTestJSON-805938939 tempest-ImagesTestJSON-805938939-project-member] Lock "e3dc39a5-4e90-472d-8b62-fd17572852f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.046s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.413368] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0b1048a5-1a57-4069-9b16-9d13648559c0 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "1c1404fd-a954-4849-883b-7898a7e87e2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.903s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.517573] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379444, 'name': CreateVM_Task, 'duration_secs': 0.307294} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.517573] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.518802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.518802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.518802] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1068.518802] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf7dd50a-4db8-4fd4-b431-5b5fe53134a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.523070] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1068.523070] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529e3771-10e9-c1a9-88bd-3bbc470451aa" [ 1068.523070] env[61974]: _type = "Task" [ 1068.523070] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.530440] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529e3771-10e9-c1a9-88bd-3bbc470451aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.594924] env[61974]: DEBUG nova.compute.manager [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1068.595224] env[61974]: DEBUG nova.compute.manager [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing instance network info cache due to event network-changed-e377f334-8d36-4f17-8532-abbd37c47eba. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1068.595497] env[61974]: DEBUG oslo_concurrency.lockutils [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.595717] env[61974]: DEBUG oslo_concurrency.lockutils [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.595929] env[61974]: DEBUG nova.network.neutron [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Refreshing network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.706195] env[61974]: INFO nova.compute.manager [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Took 21.35 seconds to build instance. 
[ 1068.723525] env[61974]: DEBUG oslo_concurrency.lockutils [None req-c6983d29-4e63-4fb0-b46c-9e20ac82245f tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.088s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.040324] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.041057] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Processing image 7e8de138-c44e-4516-9083-c48e99a4114f {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.041057] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.041212] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.041378] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.042279] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dc390a-900f-4b1a-9b36-76d6d40ade20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.044820] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fce38e0-bc8b-41c4-9a40-cdcba516311c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.052725] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04facad3-37b9-4785-89bf-1b66cd0fd5a5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.089314] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c4e4e2-796a-4496-b905-53665bbe00c0 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.093087] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.093373] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.094229] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af6acfe-b448-48e5-a956-3856ca3732c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.103609] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1069.103609] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d9ed19-91d3-ce65-0b62-518cf41feabf" [ 1069.103609] env[61974]: _type = "Task" [ 1069.103609] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.105200] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f531c8-f180-42d0-a4b8-dc4d429577db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.122879] env[61974]: DEBUG nova.compute.provider_tree [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.127773] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Preparing fetch location {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1069.128125] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Fetch image to [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd/OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd.vmdk {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1069.128376] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Downloading stream optimized image 7e8de138-c44e-4516-9083-c48e99a4114f to [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd/OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd.vmdk on the data 
store datastore1 as vApp {{(pid=61974) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1069.128595] env[61974]: DEBUG nova.virt.vmwareapi.images [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Downloading image file data 7e8de138-c44e-4516-9083-c48e99a4114f to the ESX as VM named 'OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd' {{(pid=61974) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1069.211121] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e4a4440e-8fda-48d1-92a7-b1619173fe7c tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.867s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.214202] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1069.214202] env[61974]: value = "resgroup-9" [ 1069.214202] env[61974]: _type = "ResourcePool" [ 1069.214202] env[61974]: }. {{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1069.214623] env[61974]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c7f951a1-7424-41f9-984c-a3e8a8682c9d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.232105] env[61974]: DEBUG nova.compute.manager [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Received event network-changed-e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1069.232313] env[61974]: DEBUG nova.compute.manager [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Refreshing instance network info cache due to event network-changed-e43abc26-b25e-444f-9857-3967570e294a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1069.232950] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] Acquiring lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.232950] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] Acquired lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.232950] env[61974]: DEBUG nova.network.neutron [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Refreshing network info cache for port e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.242165] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lease: (returnval){ [ 1069.242165] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1069.242165] env[61974]: _type = "HttpNfcLease" [ 1069.242165] env[61974]: } obtained for vApp import into resource pool (val){ [ 1069.242165] env[61974]: value = "resgroup-9" [ 1069.242165] env[61974]: _type = "ResourcePool" [ 1069.242165] env[61974]: }. {{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1069.242471] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the lease: (returnval){ [ 1069.242471] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1069.242471] env[61974]: _type = "HttpNfcLease" [ 1069.242471] env[61974]: } to be ready. {{(pid=61974) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1069.249017] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.249017] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1069.249017] env[61974]: _type = "HttpNfcLease" [ 1069.249017] env[61974]: } is initializing. 
{{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1069.631079] env[61974]: DEBUG nova.scheduler.client.report [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1069.753192] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.753192] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1069.753192] env[61974]: _type = "HttpNfcLease" [ 1069.753192] env[61974]: } is initializing. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1069.784330] env[61974]: DEBUG nova.network.neutron [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updated VIF entry in instance network info cache for port e377f334-8d36-4f17-8532-abbd37c47eba. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.784330] env[61974]: DEBUG nova.network.neutron [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.137023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.137023] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1070.137873] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.276s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.253209] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.253209] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1070.253209] env[61974]: _type = "HttpNfcLease" [ 1070.253209] env[61974]: } is initializing. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1070.286217] env[61974]: DEBUG oslo_concurrency.lockutils [req-165ff990-4d21-493c-810c-b24beea4a1ec req-fd6cae0a-a7bd-4b48-bdd9-f5f00915fdf9 service nova] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.307820] env[61974]: DEBUG nova.network.neutron [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updated VIF entry in instance network info cache for port e43abc26-b25e-444f-9857-3967570e294a. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.308238] env[61974]: DEBUG nova.network.neutron [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating instance_info_cache with network_info: [{"id": "e43abc26-b25e-444f-9857-3967570e294a", "address": "fa:16:3e:00:ac:ab", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape43abc26-b2", "ovs_interfaceid": "e43abc26-b25e-444f-9857-3967570e294a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.365299] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.365600] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.365895] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.366173] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.366391] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.369242] env[61974]: INFO nova.compute.manager [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Terminating instance [ 1070.371476] env[61974]: DEBUG nova.compute.manager [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1070.371747] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.372614] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70f3dfa-bd29-41d3-9abf-8e9d3370cf2f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.380485] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.380788] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-002203ec-bf3f-4ed4-a4ba-47122496d815 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.386915] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1070.386915] env[61974]: value = "task-1379446" [ 1070.386915] env[61974]: _type = "Task" [ 1070.386915] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.395332] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379446, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.642835] env[61974]: DEBUG nova.compute.utils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1070.643656] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1070.643846] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1070.690754] env[61974]: DEBUG nova.policy [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1070.752245] env[61974]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.752245] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1070.752245] env[61974]: _type = "HttpNfcLease" [ 1070.752245] env[61974]: } is ready. {{(pid=61974) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1070.754399] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1070.754399] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f36598-2b04-eed8-32dc-fa3001b18db6" [ 1070.754399] env[61974]: _type = "HttpNfcLease" [ 1070.754399] env[61974]: }. {{(pid=61974) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1070.756028] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0540aa-7dcf-4a34-9c55-7e4072d4edaf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.764589] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk from lease info. 
{{(pid=61974) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1070.764752] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk. {{(pid=61974) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1070.831015] env[61974]: DEBUG oslo_concurrency.lockutils [req-7d739d1d-cf9a-4d12-97a2-a722c6b1b335 req-b69882d1-4e6c-4d9c-9257-cb2834b4ae99 service nova] Releasing lock "refresh_cache-8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.833258] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e6de71-f5cb-49c9-a83f-273928be029f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.843193] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a9c52d2a-c39a-48b8-97cd-03385fb29bab {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.850538] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fb8fa8-d5f5-440f-a959-229341618d1a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.886872] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11897b2d-492a-41f0-905e-5a2a2455c47f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.897779] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e33a59-0504-4742-a375-92ae169b4b2c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.904476] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379446, 'name': PowerOffVM_Task, 'duration_secs': 0.201669} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.905091] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.905278] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.905520] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a737a07b-8bf0-44f9-b646-cd285c43d7d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.914535] env[61974]: DEBUG nova.compute.provider_tree [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.985275] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.986533] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.986533] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleting the datastore file [datastore2] ca8a238c-4b52-4016-8614-c2f8ad7891f7 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.986533] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-919d0da2-8100-47cd-8c8e-5dd429c9e917 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.991962] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1070.991962] env[61974]: value = "task-1379448" [ 1070.991962] env[61974]: _type = "Task" [ 1070.991962] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.999682] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379448, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.023041] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Successfully created port: a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.147769] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1071.420631] env[61974]: DEBUG nova.scheduler.client.report [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1071.506128] env[61974]: DEBUG oslo_vmware.api [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141183} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.509126] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.509243] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.509402] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.509635] env[61974]: INFO nova.compute.manager [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1071.509990] env[61974]: DEBUG oslo.service.loopingcall [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1071.510280] env[61974]: DEBUG nova.compute.manager [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1071.510439] env[61974]: DEBUG nova.network.neutron [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.745300] env[61974]: DEBUG nova.compute.manager [req-3a9e91ce-078a-4bbb-8377-724c5d47e293 req-d6fd693f-8cab-420c-9ddf-36f6efa1fffc service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Received event network-vif-deleted-ff6934b6-3a62-4412-8f5f-06c123c3e260 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1071.745411] env[61974]: INFO nova.compute.manager [req-3a9e91ce-078a-4bbb-8377-724c5d47e293 req-d6fd693f-8cab-420c-9ddf-36f6efa1fffc service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Neutron deleted interface ff6934b6-3a62-4412-8f5f-06c123c3e260; detaching it from the instance and deleting it from the info cache [ 1071.745595] env[61974]: DEBUG nova.network.neutron [req-3a9e91ce-078a-4bbb-8377-724c5d47e293 req-d6fd693f-8cab-420c-9ddf-36f6efa1fffc service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.009052] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.009396] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.164805] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1072.192636] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.192636] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.192801] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.192937] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.193103] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.193254] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.193465] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.193622] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.193808] env[61974]: DEBUG nova.virt.hardware [None 
req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.194232] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.194232] env[61974]: DEBUG nova.virt.hardware [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.195097] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8853b4f6-edaf-4e69-9709-6fcee4e249c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.206873] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15553b93-5b52-4dda-9f4b-37f161a18f82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.221420] env[61974]: DEBUG nova.network.neutron [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.224317] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Completed reading data from the image iterator. {{(pid=61974) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1072.224525] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1072.225326] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b89b9b-66d7-4629-8c84-b2daedca29a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.231568] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk is in state: ready. 
{{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1072.231746] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk. {{(pid=61974) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1072.232542] env[61974]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-090fa69f-2940-4375-b74f-57215dc56340 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.247957] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2141b92-ddec-47cf-b4d2-b1298e18dc2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.258159] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f01b98-e71e-4ab8-b54b-5a664a2f1e58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.286246] env[61974]: DEBUG nova.compute.manager [req-3a9e91ce-078a-4bbb-8377-724c5d47e293 req-d6fd693f-8cab-420c-9ddf-36f6efa1fffc service nova] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Detach interface failed, port_id=ff6934b6-3a62-4412-8f5f-06c123c3e260, reason: Instance ca8a238c-4b52-4016-8614-c2f8ad7891f7 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1072.436809] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.299s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.512454] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1072.729303] env[61974]: INFO nova.compute.manager [-] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Took 1.22 seconds to deallocate network for instance. [ 1072.846213] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Successfully updated port: a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1072.853133] env[61974]: DEBUG oslo_vmware.rw_handles [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5287eb29-9283-4a0f-3d31-8d01a486bdae/disk-0.vmdk. 
{{(pid=61974) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1072.853553] env[61974]: INFO nova.virt.vmwareapi.images [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Downloaded image file data 7e8de138-c44e-4516-9083-c48e99a4114f [ 1072.854593] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddce937-1731-467a-9344-982449f58f4f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.872590] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-100a8af2-d27a-4d9e-89c5-c5aa8b0cfe7e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.945971] env[61974]: INFO nova.virt.vmwareapi.images [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] The imported VM was unregistered [ 1072.950237] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Caching image {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1072.950502] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Creating directory with path [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.951618] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c04f242b-f6e5-4768-82a1-eacf237add72 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.965251] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Created directory with path [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.965552] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd/OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd.vmdk to [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk. 
{{(pid=61974) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1072.968634] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-613e0e9c-87b5-4691-ad50-8ecd3d91e5cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.975122] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1072.975122] env[61974]: value = "task-1379450" [ 1072.975122] env[61974]: _type = "Task" [ 1072.975122] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.983948] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.008072] env[61974]: INFO nova.scheduler.client.report [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocation for migration 897477ea-f863-4278-ac49-cbd1a2013020 [ 1073.039023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.039023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.039023] env[61974]: INFO nova.compute.claims [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.236795] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.349439] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.349629] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f 
tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.349816] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.439856] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.486341] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.516515] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46fcdfcb-c29f-4742-ad52-9813927196d2 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.034s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.516515] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.077s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.516708] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.516869] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.518083] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock 
"bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.520969] env[61974]: INFO nova.compute.manager [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Terminating instance [ 1073.523096] env[61974]: DEBUG nova.compute.manager [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1073.523516] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.524262] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7548b8-056d-42a4-9e63-bd56a7cd8655 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.537171] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.537469] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f65c02e8-6205-4f51-a2df-fa322ccdcd34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.551530] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1073.551530] env[61974]: value = "task-1379451" [ 1073.551530] env[61974]: _type = "Task" [ 1073.551530] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.565819] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379451, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.775653] env[61974]: DEBUG nova.compute.manager [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Received event network-vif-plugged-a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.775893] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Acquiring lock "584ce365-9125-4c2a-9668-f921beb599e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.776096] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Lock "584ce365-9125-4c2a-9668-f921beb599e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.776301] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Lock "584ce365-9125-4c2a-9668-f921beb599e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.776488] env[61974]: DEBUG nova.compute.manager [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] No waiting events found dispatching network-vif-plugged-a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1073.776684] env[61974]: WARNING nova.compute.manager [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Received unexpected event network-vif-plugged-a2cdd452-355a-4078-ad38-aba9f8f7e20b for instance with vm_state building and task_state spawning. [ 1073.776846] env[61974]: DEBUG nova.compute.manager [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Received event network-changed-a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.777687] env[61974]: DEBUG nova.compute.manager [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Refreshing instance network info cache due to event network-changed-a2cdd452-355a-4078-ad38-aba9f8f7e20b. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1073.777975] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Acquiring lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.905440] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1073.950579] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "33d2889a-7f80-4d65-8325-91355c9bcb46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.950741] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.987948] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.073094] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379451, 'name': PowerOffVM_Task, 'duration_secs': 0.232096} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.076339] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.076546] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.077059] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c83db97f-69df-4278-a011-9a6c63be95bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.158636] env[61974]: DEBUG nova.network.neutron [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Updating instance_info_cache with network_info: [{"id": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "address": "fa:16:3e:c9:0e:54", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2cdd452-35", "ovs_interfaceid": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.165590] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.165590] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.165827] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 
tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleting the datastore file [datastore2] bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.166216] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-336a6ba0-7246-4f76-bf35-acacdf044c19 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.178567] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1074.178567] env[61974]: value = "task-1379453" [ 1074.178567] env[61974]: _type = "Task" [ 1074.178567] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.192047] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.220775] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac49c6a-db0c-4fbc-8c03-3362ff6e13c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.232006] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8d28d7-8ce4-423d-9866-eac82a47832a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.267901] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d09edc0-6d89-4c4d-a9b3-d8eca633e54f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.277518] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f866080e-82b8-499d-8f35-00190b3f18e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.295109] env[61974]: DEBUG nova.compute.provider_tree [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.452981] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1074.489406] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.661495] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.661853] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Instance network_info: |[{"id": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "address": "fa:16:3e:c9:0e:54", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2cdd452-35", "ovs_interfaceid": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1074.662228] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Acquired lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.662426] env[61974]: DEBUG nova.network.neutron [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Refreshing network info cache for port a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1074.663679] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:0e:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2cdd452-355a-4078-ad38-aba9f8f7e20b', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1074.672776] env[61974]: DEBUG oslo.service.loopingcall [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.673369] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1074.673605] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e60e7a4e-b6d6-45a6-b3ab-47d9570c3aa0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.699241] env[61974]: DEBUG oslo_vmware.api [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266396} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.700741] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1074.701050] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1074.701326] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1074.701503] env[61974]: INFO nova.compute.manager [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1074.701758] env[61974]: DEBUG oslo.service.loopingcall [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.701956] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1074.701956] env[61974]: value = "task-1379454" [ 1074.701956] env[61974]: _type = "Task" [ 1074.701956] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.702255] env[61974]: DEBUG nova.compute.manager [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1074.702366] env[61974]: DEBUG nova.network.neutron [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1074.714535] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379454, 'name': CreateVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.726264] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.726737] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.726883] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.727128] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.727316] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.729991] env[61974]: INFO nova.compute.manager [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Terminating instance [ 1074.731961] env[61974]: DEBUG nova.compute.manager [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 
b1fa5433-8f26-48db-a19d-d1e11245fb44] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1074.732185] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.733337] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0f9c34-0a52-4b78-8cf3-351614a36c69 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.743261] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.743655] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e9521c5-f7be-4375-bbf3-a41dca67ec6a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.752271] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1074.752271] env[61974]: value = "task-1379455" [ 1074.752271] env[61974]: _type = "Task" [ 1074.752271] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.769078] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379455, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.800120] env[61974]: DEBUG nova.scheduler.client.report [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1074.983303] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.989686] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.215901] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379454, 'name': CreateVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.268671] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379455, 'name': PowerOffVM_Task, 'duration_secs': 0.319598} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.269391] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.269826] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.270292] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c404854-4628-4f04-895e-b960162bb4e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.306323] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.308281] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1075.310271] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.074s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.311499] env[61974]: DEBUG nova.objects.instance [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lazy-loading 'resources' on Instance uuid ca8a238c-4b52-4016-8614-c2f8ad7891f7 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.350826] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1075.351104] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1075.351299] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleting the datastore file [datastore2] b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.352072] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b708d20-f6f4-4dcf-8dab-0c2e1c409557 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.361468] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1075.361468] env[61974]: value = "task-1379457" [ 1075.361468] env[61974]: _type = "Task" [ 1075.361468] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.378096] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.463919] env[61974]: DEBUG nova.network.neutron [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Updated VIF entry in instance network info cache for port a2cdd452-355a-4078-ad38-aba9f8f7e20b. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1075.464259] env[61974]: DEBUG nova.network.neutron [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Updating instance_info_cache with network_info: [{"id": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "address": "fa:16:3e:c9:0e:54", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2cdd452-35", "ovs_interfaceid": "a2cdd452-355a-4078-ad38-aba9f8f7e20b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.488529] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.559245] env[61974]: DEBUG nova.network.neutron [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.714403] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379454, 'name': CreateVM_Task, 'duration_secs': 0.529232} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.714580] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1075.715290] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.715458] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.719025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1075.719025] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bd2ccb0-e5f9-46b7-8c84-2f46b10c217c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.721232] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1075.721232] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5220a19e-e8e3-5801-a038-3a4315b8f217" [ 1075.721232] env[61974]: _type = "Task" [ 1075.721232] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.731243] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5220a19e-e8e3-5801-a038-3a4315b8f217, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.803141] env[61974]: DEBUG nova.compute.manager [req-d2912055-093c-47f1-b75f-203869324318 req-c9a545e0-5b4a-45bb-b6da-9152d855c4d1 service nova] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Received event network-vif-deleted-62349265-e925-44c7-8158-8bfcb7fc0478 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1075.817766] env[61974]: DEBUG nova.compute.utils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.817766] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1075.817766] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1075.869774] env[61974]: DEBUG nova.policy [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '453e22de6c0f478d93d6269ea122d660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61c671d85b64b28872586c2816b83f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1075.880959] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.970212] env[61974]: DEBUG oslo_concurrency.lockutils [req-698c9a40-a2e8-4cea-9f85-02420dde6b4f req-2bb416cd-9872-4e23-b7fa-66fec0990f4f service nova] Releasing lock "refresh_cache-584ce365-9125-4c2a-9668-f921beb599e0" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.992296] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379450, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.696947} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.992641] env[61974]: INFO nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd/OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd.vmdk to [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk. [ 1075.992838] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Cleaning up location [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1075.993020] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_95b9d74c-f086-4a8b-bfdf-f2ffa69369dd {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.993315] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-400abeb1-110f-4fda-b2e0-0ae732bb78e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.000674] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1076.000674] env[61974]: value = "task-1379458" [ 1076.000674] env[61974]: _type = "Task" [ 1076.000674] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.003698] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028e5fa5-dfe3-4da3-91fb-a107fd9ceeb5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.016087] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75316c82-3868-4d3f-b7ce-f3919677ccec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.019580] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.051131] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191ee78d-4dd3-4b7c-8839-a4a2f59cebc8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.059656] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c9d3e8-c3d6-4283-aaaa-ee6f3251f8e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.064904] env[61974]: INFO nova.compute.manager [-] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Took 1.36 seconds to deallocate network for instance. [ 1076.079261] env[61974]: DEBUG nova.compute.provider_tree [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.194974] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Successfully created port: 55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.232257] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5220a19e-e8e3-5801-a038-3a4315b8f217, 'name': SearchDatastore_Task, 'duration_secs': 0.048215} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.232655] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.232916] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.233339] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.233536] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.233726] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.234192] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11215e3f-7c8b-4e9d-b8bd-2b25ee414e3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.243588] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.243871] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.244654] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1d0feab-bbb6-49ee-ad1a-66e88054dcf3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.249626] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1076.249626] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]528e272c-8009-9ab5-f8ee-41b7cc056d2e" [ 1076.249626] env[61974]: _type = "Task" [ 1076.249626] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.257520] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528e272c-8009-9ab5-f8ee-41b7cc056d2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.327946] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1076.372534] env[61974]: DEBUG oslo_vmware.api [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.546785} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.372802] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.372992] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.373285] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.373554] env[61974]: INFO nova.compute.manager [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 1076.374164] env[61974]: DEBUG oslo.service.loopingcall [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.374313] env[61974]: DEBUG nova.compute.manager [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1076.374465] env[61974]: DEBUG nova.network.neutron [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.515240] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071053} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.515240] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.515240] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.515240] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk to [datastore1] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1076.515240] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4358c46-ad1d-421a-a0fe-30ed402fc6ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.523561] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1076.523561] env[61974]: value = "task-1379459" [ 1076.523561] env[61974]: _type = "Task" [ 1076.523561] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.534980] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.584539] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.587526] env[61974]: DEBUG nova.scheduler.client.report [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1076.767163] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]528e272c-8009-9ab5-f8ee-41b7cc056d2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009457} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.769819] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e5d9b75-e53a-4d72-b380-8dba76fd7eee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.778049] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1076.778049] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52bb430e-2dd0-86c5-aa18-1ea9f4a5c9d9" [ 1076.778049] env[61974]: _type = "Task" [ 1076.778049] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.788263] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bb430e-2dd0-86c5-aa18-1ea9f4a5c9d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.039641] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.093692] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.783s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.100020] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.114s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.100020] env[61974]: INFO nova.compute.claims [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.119407] env[61974]: INFO nova.scheduler.client.report [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance ca8a238c-4b52-4016-8614-c2f8ad7891f7 [ 1077.292425] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bb430e-2dd0-86c5-aa18-1ea9f4a5c9d9, 'name': SearchDatastore_Task, 'duration_secs': 0.062011} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.292425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.292425] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 584ce365-9125-4c2a-9668-f921beb599e0/584ce365-9125-4c2a-9668-f921beb599e0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1077.292425] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3ab2bf9-f512-4206-8a7e-feddc7e5a1f5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.299186] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1077.299186] env[61974]: value = "task-1379460" [ 1077.299186] env[61974]: _type = "Task" [ 1077.299186] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.307475] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.342194] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1077.369884] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1077.369884] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.369884] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1077.369884] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.369884] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1077.370592] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1077.371029] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1077.371328] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1077.371616] env[61974]: DEBUG 
nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1077.371893] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1077.372217] env[61974]: DEBUG nova.virt.hardware [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1077.373428] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6108264-f201-4dce-a4a8-3dbe93420df6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.382917] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f891b99a-26d9-43de-8518-7ac34f021766 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.518783] env[61974]: DEBUG nova.network.neutron [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.537466] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.630705] env[61974]: DEBUG oslo_concurrency.lockutils [None req-a0d24549-09ba-4e2c-830d-bfd3cb835ac0 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "ca8a238c-4b52-4016-8614-c2f8ad7891f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.265s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.807996] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.873509] env[61974]: DEBUG nova.compute.manager [req-65a6e7ce-dea5-4cc0-8694-e7c1ec040957 req-a87460cf-d644-4ee9-a7ce-51599748e282 service nova] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Received event network-vif-deleted-7f8230e7-7883-4de2-bf5f-ffa36751a171 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1078.022377] env[61974]: INFO nova.compute.manager [-] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Took 1.65 seconds to deallocate network for instance. 
[ 1078.042443] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.245135] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Successfully updated port: 55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.269412] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72aacbaf-3d13-44df-b337-c4040fb7ba14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.279259] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8463098f-8a89-423d-ae66-a08aba8a8b81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.319036] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674ad964-68ee-4c00-a48e-0e989ed48da8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.324710] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.328026] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565e7ac1-d25a-48d7-bf6c-6fa55e100008 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.345034] env[61974]: DEBUG nova.compute.provider_tree [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.533348] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.545805] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.747931] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.748290] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.748492] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1078.824368] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379460, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.852300] env[61974]: DEBUG nova.scheduler.client.report [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1078.897022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "18559ea6-0cc4-4201-bafa-e63868753a06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.897151] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.047154] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] 
Task: {'id': task-1379459, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.452926} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.047154] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/7e8de138-c44e-4516-9083-c48e99a4114f/7e8de138-c44e-4516-9083-c48e99a4114f.vmdk to [datastore1] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.047154] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b49920-1049-4392-9c96-b86beb47bfc1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.074557] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk or device None with type streamOptimized {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.074745] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f904373-770f-40ad-a525-dc83d3ced670 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.095639] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1079.095639] env[61974]: value = "task-1379461" [ 1079.095639] env[61974]: _type = "Task" [ 1079.095639] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.106694] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379461, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.123762] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.124044] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.304532] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.322123] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379460, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.0005} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.322540] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 584ce365-9125-4c2a-9668-f921beb599e0/584ce365-9125-4c2a-9668-f921beb599e0.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.322777] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.323090] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5519b199-fee2-4c99-b237-126b5e75b799 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.331147] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1079.331147] env[61974]: value = "task-1379462" [ 1079.331147] env[61974]: _type = "Task" [ 1079.331147] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.339335] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.357951] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.358497] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1079.361310] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.777s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.361567] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.363588] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.831s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.363859] env[61974]: DEBUG nova.objects.instance [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'resources' on Instance uuid b1fa5433-8f26-48db-a19d-d1e11245fb44 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.393979] env[61974]: INFO nova.scheduler.client.report [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocations for instance bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63 [ 1079.401913] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1079.460763] env[61974]: DEBUG nova.network.neutron [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updating instance_info_cache with network_info: [{"id": "55afa9de-d15b-470c-a494-746a6ad74042", "address": "fa:16:3e:87:5f:b4", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55afa9de-d1", "ovs_interfaceid": "55afa9de-d15b-470c-a494-746a6ad74042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.607461] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379461, 'name': ReconfigVM_Task, 'duration_secs': 0.342853} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.607816] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb/1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb.vmdk or device None with type streamOptimized {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.608468] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-670c6b2e-be18-437a-96ec-0880b87405b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.618918] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1079.618918] env[61974]: value = "task-1379463" [ 1079.618918] env[61974]: _type = "Task" [ 1079.618918] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.630621] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1079.632451] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379463, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.842315] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070686} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.842658] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1079.843626] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00217c2-cb16-4ad6-af24-a17a4e4eeb8b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.866073] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 584ce365-9125-4c2a-9668-f921beb599e0/584ce365-9125-4c2a-9668-f921beb599e0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1079.867319] env[61974]: DEBUG nova.compute.utils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.870791] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87301fbf-96de-4e90-b6f6-2c51e3465c21 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.885572] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1079.888038] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1079.888222] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.896996] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1079.896996] env[61974]: value = "task-1379464" [ 1079.896996] env[61974]: _type = "Task" [ 1079.896996] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.911598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-76e550b1-7e9a-4f47-b45e-9ef03ee82722 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.395s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.912539] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379464, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.914020] env[61974]: DEBUG nova.compute.manager [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Received event network-vif-plugged-55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1079.914328] env[61974]: DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.915411] env[61974]: DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.915411] env[61974]: DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.915411] env[61974]: DEBUG nova.compute.manager [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] No waiting events found dispatching network-vif-plugged-55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1079.915411] env[61974]: WARNING nova.compute.manager [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Received unexpected event network-vif-plugged-55afa9de-d15b-470c-a494-746a6ad74042 for instance with vm_state building and task_state spawning. [ 1079.915411] env[61974]: DEBUG nova.compute.manager [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Received event network-changed-55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1079.915723] env[61974]: DEBUG nova.compute.manager [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Refreshing instance network info cache due to event network-changed-55afa9de-d15b-470c-a494-746a6ad74042. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1079.915811] env[61974]: DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Acquiring lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.934529] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.959599] env[61974]: DEBUG nova.policy [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6173db476e814cbaa6b3278cfa527bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7dae05232e0041dba49b0432d64d82d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1079.962851] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.963353] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Instance network_info: |[{"id": "55afa9de-d15b-470c-a494-746a6ad74042", "address": "fa:16:3e:87:5f:b4", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55afa9de-d1", "ovs_interfaceid": "55afa9de-d15b-470c-a494-746a6ad74042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1079.963516] env[61974]: 
DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Acquired lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.963635] env[61974]: DEBUG nova.network.neutron [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Refreshing network info cache for port 55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1079.965148] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:5f:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55afa9de-d15b-470c-a494-746a6ad74042', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1079.972861] env[61974]: DEBUG oslo.service.loopingcall [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1079.977336] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1079.977973] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d22372cd-01f7-4d36-a750-d5e2e18157df {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.000278] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.000278] env[61974]: value = "task-1379465" [ 1080.000278] env[61974]: _type = "Task" [ 1080.000278] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.010277] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379465, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.026466] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5c0d0c-5eda-477d-8d12-f830cc562496 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.036190] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1392729f-5fff-4c80-bd69-ad72f51b0138 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.069322] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a973420-dfc4-46fb-a0e7-7981c222171d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.076799] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d65cc69-3b2f-4ce1-b38e-44ed0faee26e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.092014] env[61974]: DEBUG nova.compute.provider_tree [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.130922] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379463, 'name': Rename_Task, 'duration_secs': 0.140198} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.131268] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.131528] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2480214-f4ad-4780-a37e-6a514264e729 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.140450] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1080.140450] env[61974]: value = "task-1379466" [ 1080.140450] env[61974]: _type = "Task" [ 1080.140450] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.148731] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379466, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.149684] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.322891] env[61974]: DEBUG nova.network.neutron [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updated VIF entry in instance network info cache for port 55afa9de-d15b-470c-a494-746a6ad74042. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1080.323386] env[61974]: DEBUG nova.network.neutron [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updating instance_info_cache with network_info: [{"id": "55afa9de-d15b-470c-a494-746a6ad74042", "address": "fa:16:3e:87:5f:b4", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55afa9de-d1", "ovs_interfaceid": "55afa9de-d15b-470c-a494-746a6ad74042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.390291] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Successfully created port: 2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.406279] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.509378] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379465, 'name': CreateVM_Task, 'duration_secs': 0.343674} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.509566] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1080.510300] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.510488] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.510815] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1080.511109] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c5315bf-1ba0-4f3d-bb5a-6afb3c20242e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.515553] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1080.515553] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52a0568d-c88f-519c-e6ac-3c2af915b548" [ 1080.515553] env[61974]: _type = "Task" [ 1080.515553] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.525599] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a0568d-c88f-519c-e6ac-3c2af915b548, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.596860] env[61974]: DEBUG nova.scheduler.client.report [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1080.651740] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379466, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.826825] env[61974]: DEBUG oslo_concurrency.lockutils [req-5f0d9bb1-ade8-4685-80da-386e30e65dc8 req-3a033a45-68a5-456c-aa39-3d7d579cc960 service nova] Releasing lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.898200] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1080.909707] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379464, 'name': ReconfigVM_Task, 'duration_secs': 0.634047} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.909932] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 584ce365-9125-4c2a-9668-f921beb599e0/584ce365-9125-4c2a-9668-f921beb599e0.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.910821] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85296932-252d-4cfd-bfb5-ffe1cbdd077f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.916759] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1080.916759] env[61974]: value = "task-1379467" [ 1080.916759] env[61974]: _type = "Task" [ 1080.916759] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.924936] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.925201] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.925368] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.925560] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.925712] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.925863] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.926096] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.926272] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.926448] 
env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.926620] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.926798] env[61974]: DEBUG nova.virt.hardware [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.927598] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbfa22a-63d4-483b-b345-c733bd8c2d82 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.934890] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379467, 'name': Rename_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.939749] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f0e59e-c4d6-49d4-a66f-a10313a6df35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.025351] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52a0568d-c88f-519c-e6ac-3c2af915b548, 'name': SearchDatastore_Task, 'duration_secs': 0.00937} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.025665] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.025907] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.026194] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.026350] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.026535] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.026787] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59938f7b-7cc8-4d19-8e7f-cf2fa63526a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.034642] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.034791] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.035466] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23fe7a91-35e4-4eea-879d-88906eb37f72 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.040526] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1081.040526] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ba91-c983-7c1b-3a50-735ed429817c" [ 1081.040526] env[61974]: _type = "Task" [ 1081.040526] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.048330] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ba91-c983-7c1b-3a50-735ed429817c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.102057] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.103786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.170s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.105313] env[61974]: INFO nova.compute.claims [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.126888] env[61974]: INFO nova.scheduler.client.report [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleted allocations for instance b1fa5433-8f26-48db-a19d-d1e11245fb44 [ 1081.153185] env[61974]: DEBUG oslo_vmware.api [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379466, 'name': PowerOnVM_Task, 'duration_secs': 0.60999} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.153998] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1081.275804] env[61974]: DEBUG nova.compute.manager [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1081.276747] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7bbb89-5332-4874-8fd5-96e2203044bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.428207] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379467, 'name': Rename_Task, 'duration_secs': 0.273233} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.428499] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.428738] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9600f12-175d-4ebb-a39f-3c8e4dc260e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.436452] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1081.436452] env[61974]: value = "task-1379468" [ 1081.436452] env[61974]: _type = "Task" [ 1081.436452] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.445928] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379468, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.551787] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52b4ba91-c983-7c1b-3a50-735ed429817c, 'name': SearchDatastore_Task, 'duration_secs': 0.012713} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.552381] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9a3ebbd-fa9f-4317-9bcd-c9b5275f7b66 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.557397] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1081.557397] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52ac8432-3a12-61c3-39b6-bb0d5af6cd4a" [ 1081.557397] env[61974]: _type = "Task" [ 1081.557397] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.565302] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ac8432-3a12-61c3-39b6-bb0d5af6cd4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.633864] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6aa5abe6-3081-49c5-9b2b-aa97b20a687f tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "b1fa5433-8f26-48db-a19d-d1e11245fb44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.907s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.792966] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c88f642-9fec-4619-b67f-095a57876737 tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 28.216s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.942829] env[61974]: DEBUG nova.compute.manager [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Received event network-vif-plugged-2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1081.943071] env[61974]: DEBUG oslo_concurrency.lockutils [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] Acquiring lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.943328] env[61974]: DEBUG oslo_concurrency.lockutils [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.943507] env[61974]: DEBUG oslo_concurrency.lockutils [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 
req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.943682] env[61974]: DEBUG nova.compute.manager [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] No waiting events found dispatching network-vif-plugged-2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1081.943851] env[61974]: WARNING nova.compute.manager [req-ecf25a9b-5b40-4a20-a296-549fc7949c53 req-7e3893fc-2669-4df7-abe2-d87dcf65ec6e service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Received unexpected event network-vif-plugged-2da2f869-4db9-419c-9731-3cb2382bb153 for instance with vm_state building and task_state spawning. [ 1081.949984] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379468, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.957553] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Successfully updated port: 2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.068377] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52ac8432-3a12-61c3-39b6-bb0d5af6cd4a, 'name': SearchDatastore_Task, 'duration_secs': 0.008953} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.068972] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.068972] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 4d4f5746-5873-4933-8741-c07ca43c13cb/4d4f5746-5873-4933-8741-c07ca43c13cb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.069185] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba3f3890-6f62-4f80-8056-13f94cf55c03 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.075796] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1082.075796] env[61974]: value = "task-1379469" [ 1082.075796] env[61974]: _type = "Task" [ 1082.075796] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.083735] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.235213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f224dfb7-f45b-4afb-baed-62feab6b0055 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.243664] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68af18b4-d4b8-44f3-8aa7-cbc1b12db3ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.275117] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ab84c4-7d1d-41ea-a2dd-3bd5d26012be {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.283377] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2650ec1f-fd26-4f59-bdaf-2fb415450acf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.299148] env[61974]: DEBUG nova.compute.provider_tree [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.448550] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379468, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.460459] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.460633] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.460759] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.587281] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491878} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.587685] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 4d4f5746-5873-4933-8741-c07ca43c13cb/4d4f5746-5873-4933-8741-c07ca43c13cb.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1082.587963] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1082.588244] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48002c3d-686b-46ed-9795-0608461c3040 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.595291] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1082.595291] env[61974]: value = "task-1379470" [ 1082.595291] env[61974]: _type = "Task" [ 1082.595291] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.604657] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.803259] env[61974]: DEBUG nova.scheduler.client.report [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1082.952663] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379468, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.992457] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.109707] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265382} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.110042] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.110872] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba815181-5667-481b-aedb-8d94e84cb3a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.140654] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 4d4f5746-5873-4933-8741-c07ca43c13cb/4d4f5746-5873-4933-8741-c07ca43c13cb.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.143545] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07422f21-3ecd-449e-8089-4f8f2ad06584 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.164487] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1083.164487] env[61974]: value = "task-1379471" [ 1083.164487] env[61974]: _type = "Task" [ 1083.164487] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.173137] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379471, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.201899] env[61974]: DEBUG nova.network.neutron [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Updating instance_info_cache with network_info: [{"id": "2da2f869-4db9-419c-9731-3cb2382bb153", "address": "fa:16:3e:9f:e0:d3", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da2f869-4d", "ovs_interfaceid": "2da2f869-4db9-419c-9731-3cb2382bb153", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.308697] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.309335] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1083.312333] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.163s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.314313] env[61974]: INFO nova.compute.claims [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.447879] env[61974]: DEBUG oslo_vmware.api [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379468, 'name': PowerOnVM_Task, 'duration_secs': 1.702229} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.448082] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.448201] env[61974]: INFO nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Took 11.28 seconds to spawn the instance on the hypervisor. [ 1083.448395] env[61974]: DEBUG nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1083.449281] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3cac23-6834-49ab-a6b0-3ae221f724e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.675565] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379471, 'name': ReconfigVM_Task, 'duration_secs': 0.281975} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.675914] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 4d4f5746-5873-4933-8741-c07ca43c13cb/4d4f5746-5873-4933-8741-c07ca43c13cb.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.676572] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a282ff7a-7691-45ee-ab66-82e310d6e71e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.683256] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1083.683256] env[61974]: value = "task-1379472" [ 1083.683256] env[61974]: _type = "Task" [ 1083.683256] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.691782] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379472, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.707966] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.708287] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Instance network_info: |[{"id": "2da2f869-4db9-419c-9731-3cb2382bb153", "address": "fa:16:3e:9f:e0:d3", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da2f869-4d", "ovs_interfaceid": "2da2f869-4db9-419c-9731-3cb2382bb153", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1083.708740] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:e0:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2da2f869-4db9-419c-9731-3cb2382bb153', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.716442] env[61974]: DEBUG oslo.service.loopingcall [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.716663] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.716841] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7798c199-6cf9-498a-b614-442ef432aa58 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.735763] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.735763] env[61974]: value = "task-1379473" [ 1083.735763] env[61974]: _type = "Task" [ 1083.735763] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.743321] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379473, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.819513] env[61974]: DEBUG nova.compute.utils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1083.824924] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1083.824924] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1083.881303] env[61974]: DEBUG nova.policy [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4bc000decdf486e8f4801c417d00778', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44c3e02b442a42d2865633bbe651bf76', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1083.946160] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d8c93b-0812-4da6-9762-f6a2d5d9b1d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.953730] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Suspending the VM 
{{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1083.954028] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c9d60423-cd14-4175-bf55-d8ca20a8d57a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.964126] env[61974]: DEBUG oslo_vmware.api [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1083.964126] env[61974]: value = "task-1379474" [ 1083.964126] env[61974]: _type = "Task" [ 1083.964126] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.970208] env[61974]: INFO nova.compute.manager [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Took 22.08 seconds to build instance. [ 1083.975506] env[61974]: DEBUG oslo_vmware.api [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379474, 'name': SuspendVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.977699] env[61974]: DEBUG nova.compute.manager [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Received event network-changed-2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1083.977894] env[61974]: DEBUG nova.compute.manager [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Refreshing instance network info cache due to event network-changed-2da2f869-4db9-419c-9731-3cb2382bb153. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1083.978160] env[61974]: DEBUG oslo_concurrency.lockutils [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] Acquiring lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1083.978737] env[61974]: DEBUG oslo_concurrency.lockutils [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] Acquired lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.978737] env[61974]: DEBUG nova.network.neutron [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Refreshing network info cache for port 2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.194241] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379472, 'name': Rename_Task, 'duration_secs': 0.161298} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.194623] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.194890] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58ff9081-9c17-4b0a-bd60-01b8d6c8d7d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.201516] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1084.201516] env[61974]: value = "task-1379475" [ 1084.201516] env[61974]: _type = "Task" [ 1084.201516] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.216022] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.241769] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Successfully created port: d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.250578] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379473, 'name': CreateVM_Task, 'duration_secs': 0.315942} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.250775] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.251850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.251850] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.252011] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.252271] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd6e6b9e-ab4a-45c8-8d67-26b12fa5500b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.257178] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1084.257178] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522c5f63-dc7c-5e65-6fc4-8d587d5914a6" [ 1084.257178] env[61974]: _type = "Task" [ 1084.257178] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.267073] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522c5f63-dc7c-5e65-6fc4-8d587d5914a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.316193] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.316710] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.322502] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1084.418189] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e51c6d-b748-4c87-9f6b-37c6e9a8acec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.427891] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Suspending the VM {{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1084.428201] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6cf1db02-970e-4907-84cd-2080486bb590 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.435035] env[61974]: DEBUG oslo_vmware.api [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1084.435035] env[61974]: value = "task-1379476" [ 1084.435035] env[61974]: _type = "Task" [ 1084.435035] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.445748] env[61974]: DEBUG oslo_vmware.api [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379476, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.473190] env[61974]: DEBUG oslo_concurrency.lockutils [None req-dfb2aaf1-c96f-4c3c-9a7e-5a8374b3221f tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.589s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.473662] env[61974]: DEBUG oslo_vmware.api [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379474, 'name': SuspendVM_Task} progress is 45%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.474988] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810d2385-d48b-4dbe-b2aa-a82b0fb5ce87 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.483155] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c75f33f-6534-4653-8594-e82d9fc80f3d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.513761] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6d95a8-6d4e-4239-bf8b-6c5319c9816b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.521372] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f17a99d-1313-4ee5-aab0-187453bbb70e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.536555] env[61974]: DEBUG nova.compute.provider_tree [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.712026] env[61974]: DEBUG oslo_vmware.api [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379475, 'name': PowerOnVM_Task, 'duration_secs': 0.493639} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.714711] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1084.714944] env[61974]: INFO nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Took 7.37 seconds to spawn the instance on the hypervisor. 
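The CreateVM_Task, SuspendVM_Task, PowerOnVM_Task, and SearchDatastore_Task records above all follow the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session, and the returned task object is then polled until it reports success, which is what produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" lines. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint, credentials, and VM managed-object reference (none of them taken from this log), rather than the exact nova.virt.vmwareapi code path, is:

```python
# Hedged sketch of the invoke/wait pattern behind the task records above.
# The endpoint, credentials, and moref id are illustrative placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Opening the session logs in to vCenter; task_poll_interval controls how often
# the "progress is N%" records are emitted while a task is still pending.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for the VM to operate on (id is a placeholder).
vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

# Invoke a *_Task method; wait_for_task then blocks, polling the task until it
# completes successfully or raising if the task ends in an error state.
task_ref = session.invoke_api(session.vim, 'SuspendVM_Task', vm_ref)
session.wait_for_task(task_ref)
```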
[ 1084.715151] env[61974]: DEBUG nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1084.715977] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e127ecbd-c8f4-4842-98f2-32e3e1cbe673 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.772025] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522c5f63-dc7c-5e65-6fc4-8d587d5914a6, 'name': SearchDatastore_Task, 'duration_secs': 0.012784} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.772025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.772025] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.772025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.772025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.772025] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.772025] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69095d49-03e8-4525-97a2-00d657c386a4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.783027] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 
tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.783027] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.783027] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e981620f-7f27-4188-bee3-18b31d4b9268 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.788175] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1084.788175] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52bfd0a1-bf90-3354-e65d-6636978ad324" [ 1084.788175] env[61974]: _type = "Task" [ 1084.788175] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.796401] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bfd0a1-bf90-3354-e65d-6636978ad324, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.800850] env[61974]: DEBUG nova.network.neutron [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Updated VIF entry in instance network info cache for port 2da2f869-4db9-419c-9731-3cb2382bb153. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.801545] env[61974]: DEBUG nova.network.neutron [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Updating instance_info_cache with network_info: [{"id": "2da2f869-4db9-419c-9731-3cb2382bb153", "address": "fa:16:3e:9f:e0:d3", "network": {"id": "b42774a0-686b-4132-a599-7cec777b9919", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1826867553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7dae05232e0041dba49b0432d64d82d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2da2f869-4d", "ovs_interfaceid": "2da2f869-4db9-419c-9731-3cb2382bb153", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.819447] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1084.947987] env[61974]: DEBUG oslo_vmware.api [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379476, 'name': SuspendVM_Task} progress is 62%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.974703] env[61974]: DEBUG oslo_vmware.api [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379474, 'name': SuspendVM_Task} progress is 45%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.041545] env[61974]: DEBUG nova.scheduler.client.report [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1085.233370] env[61974]: INFO nova.compute.manager [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Took 12.22 seconds to build instance. [ 1085.299256] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52bfd0a1-bf90-3354-e65d-6636978ad324, 'name': SearchDatastore_Task, 'duration_secs': 0.015891} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.300166] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fef490a3-944f-4884-bd65-41e21ea13b81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.304552] env[61974]: DEBUG oslo_concurrency.lockutils [req-98dd0c3d-b109-4b32-8076-81601a50702a req-e7f696d6-f791-4651-8d6c-b80fa28d39ad service nova] Releasing lock "refresh_cache-33d2889a-7f80-4d65-8325-91355c9bcb46" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.305817] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1085.305817] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5272907f-513e-66d0-96d7-b2307eb04422" [ 1085.305817] env[61974]: _type = "Task" [ 1085.305817] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.313655] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5272907f-513e-66d0-96d7-b2307eb04422, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.340070] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1085.343175] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.367170] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.367496] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.367673] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.367866] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.368033] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.368197] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.368412] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.368574] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.368758] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.368967] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.369176] env[61974]: DEBUG nova.virt.hardware [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.370099] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bc1b49-b190-4c71-a8d5-27d70a87f1ae {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.378328] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e80290b-5193-446d-b8b6-04d006b5207f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.447481] env[61974]: DEBUG oslo_vmware.api [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379476, 'name': SuspendVM_Task, 'duration_secs': 0.887643} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.447769] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Suspended the VM {{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1085.447994] env[61974]: DEBUG nova.compute.manager [None req-6d18871e-c982-4e5b-ab47-1d0354814a11 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.448803] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36733cad-dbc2-4bee-8bb0-419d46dd8fb1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.474454] env[61974]: DEBUG oslo_vmware.api [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379474, 'name': SuspendVM_Task, 'duration_secs': 1.329077} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.474743] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Suspended the VM {{(pid=61974) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1085.474840] env[61974]: DEBUG nova.compute.manager [None req-69260d34-07c2-416e-9077-ec90b45a560f tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.475606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6286daa6-1067-4820-a8f3-ea384e683d5a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.545904] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.546461] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1085.549460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.206s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.551245] env[61974]: INFO nova.compute.claims [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.735889] env[61974]: DEBUG oslo_concurrency.lockutils [None req-ac651d00-57af-4a0a-b044-7d8a423d19b3 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.726s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.816778] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5272907f-513e-66d0-96d7-b2307eb04422, 'name': SearchDatastore_Task, 'duration_secs': 0.031708} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.817266] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.820134] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 33d2889a-7f80-4d65-8325-91355c9bcb46/33d2889a-7f80-4d65-8325-91355c9bcb46.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.820134] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0d43d91-2b98-46cf-94a7-9bd172f8256b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.824870] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1085.824870] env[61974]: value = "task-1379477" [ 1085.824870] env[61974]: _type = "Task" [ 1085.824870] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.833361] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379477, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.037750] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Successfully updated port: d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.056179] env[61974]: DEBUG nova.compute.utils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1086.062631] env[61974]: DEBUG nova.compute.manager [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Received event network-vif-plugged-d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.064739] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Acquiring lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.065037] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.066028] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.066304] env[61974]: DEBUG nova.compute.manager [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] No waiting events found dispatching network-vif-plugged-d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1086.066536] env[61974]: WARNING nova.compute.manager [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Received unexpected event network-vif-plugged-d71dea12-f0a5-480d-ad8c-3957db2708ee for instance with vm_state building and task_state spawning. 
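The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets around names such as "refresh_cache-<uuid>", "<uuid>-events", and "compute_resources" come from oslo.concurrency's named-lock helpers, which record how long the caller waited for each lock and how long it was held. A minimal sketch of that usage, with illustrative lock bodies rather than the actual Nova event-dispatch or cache-refresh code, is:

```python
# Hedged sketch of the oslo.concurrency named-lock usage behind the
# acquire/release records above; the lock bodies are illustrative only.
from oslo_concurrency import lockutils

instance_uuid = '18559ea6-0cc4-4201-bafa-e63868753a06'  # uuid taken from the log above

# Context-manager form: the lock is held only for the duration of the block,
# which yields the matching "acquired ... waited" / "released ... held" pair.
with lockutils.lock('%s-events' % instance_uuid):
    pass  # pop or dispatch a pending instance event here

# Decorator form, as used for coarser locks such as "compute_resources".
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # perform the resource claim while the lock is held
```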
[ 1086.066816] env[61974]: DEBUG nova.compute.manager [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Received event network-changed-55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.067687] env[61974]: DEBUG nova.compute.manager [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Refreshing instance network info cache due to event network-changed-55afa9de-d15b-470c-a494-746a6ad74042. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1086.067781] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Acquiring lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.070381] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Acquired lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.070381] env[61974]: DEBUG nova.network.neutron [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Refreshing network info cache for port 55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.072950] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1086.075174] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.128097] env[61974]: DEBUG nova.policy [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '139f2fab7d4c492ab0d6fb16ea947457', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4757d24b61794cfcaefff2ad44e02b74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1086.338858] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379477, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.391988] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Successfully created port: 5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1086.418713] env[61974]: INFO nova.compute.manager [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Resuming [ 1086.419343] env[61974]: DEBUG nova.objects.instance [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'flavor' on Instance uuid 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.546745] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.546962] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquired lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.547085] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1086.575242] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1086.733943] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ec0329-b360-4317-8fcd-c2d115548ae0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.742933] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8800f7-507f-4a9d-934f-c972320b1b4d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.774091] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2d8965-0e3b-40a9-a099-71518a6e9bd9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.781929] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9afa90e-ba0a-4933-a10b-096503c45db2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.797476] env[61974]: DEBUG nova.compute.provider_tree [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.837506] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379477, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.840268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "584ce365-9125-4c2a-9668-f921beb599e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.840604] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.840848] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "584ce365-9125-4c2a-9668-f921beb599e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.841153] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.841370] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.845439] env[61974]: INFO nova.compute.manager [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Terminating instance [ 1086.848722] env[61974]: DEBUG nova.compute.manager [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.848930] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.850328] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6baa3a-9013-4794-9120-d9cf662f3db8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.860882] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1086.861337] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-266e41cc-8735-461c-9142-f398c1fbac73 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.930909] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.930909] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.930909] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleting the datastore file [datastore1] 584ce365-9125-4c2a-9668-f921beb599e0 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.930909] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6c9728e-ddf3-4acb-8f86-4267f36c6e89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.937438] env[61974]: DEBUG oslo_vmware.api [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1086.937438] env[61974]: value = "task-1379479" [ 1086.937438] env[61974]: _type = "Task" [ 1086.937438] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.948803] env[61974]: DEBUG oslo_vmware.api [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.154063] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.307141] env[61974]: DEBUG nova.scheduler.client.report [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1087.339560] env[61974]: DEBUG nova.network.neutron [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updated VIF entry in instance network info cache for port 55afa9de-d15b-470c-a494-746a6ad74042. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.340091] env[61974]: DEBUG nova.network.neutron [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updating instance_info_cache with network_info: [{"id": "55afa9de-d15b-470c-a494-746a6ad74042", "address": "fa:16:3e:87:5f:b4", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55afa9de-d1", "ovs_interfaceid": "55afa9de-d15b-470c-a494-746a6ad74042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.348839] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379477, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.178575} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.349364] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 33d2889a-7f80-4d65-8325-91355c9bcb46/33d2889a-7f80-4d65-8325-91355c9bcb46.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1087.349623] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.350163] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-004cc453-d4f7-4a19-8ed3-8e74047e61d9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.358785] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1087.358785] env[61974]: value = "task-1379480" [ 1087.358785] env[61974]: _type = "Task" [ 1087.358785] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.366748] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379480, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.391826] env[61974]: DEBUG nova.network.neutron [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Updating instance_info_cache with network_info: [{"id": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "address": "fa:16:3e:19:d3:74", "network": {"id": "2e5b2d81-dc13-418f-80bb-9289bafc6cf2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-520958173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c3e02b442a42d2865633bbe651bf76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71dea12-f0", "ovs_interfaceid": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.431129] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.431375] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquired lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.431601] env[61974]: DEBUG nova.network.neutron [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.448722] env[61974]: DEBUG oslo_vmware.api [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200256} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.448722] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.448903] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.448996] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.449440] env[61974]: INFO nova.compute.manager [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1087.449534] env[61974]: DEBUG oslo.service.loopingcall [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.449770] env[61974]: DEBUG nova.compute.manager [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1087.449900] env[61974]: DEBUG nova.network.neutron [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1087.588626] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1087.616460] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.616735] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.616904] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.617107] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.617272] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.617480] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.617710] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.617948] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.618183] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 
tempest-ServersTestJSON-692110769-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.618366] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.618552] env[61974]: DEBUG nova.virt.hardware [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.619493] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a75333-b5bf-46b0-ac08-24a68332f226 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.627730] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cf2e2a-f907-41b7-b7f0-34c111b81414 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.812548] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.813108] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1087.850493] env[61974]: DEBUG oslo_concurrency.lockutils [req-d93950f8-443e-44d6-906f-4e43e3e18f49 req-99397e06-5d41-4b2d-a080-faeec49be165 service nova] Releasing lock "refresh_cache-4d4f5746-5873-4933-8741-c07ca43c13cb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.869683] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379480, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067556} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.870011] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1087.870813] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615fdeb5-ff8e-4bd4-b9ca-96567ac08309 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.893179] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 33d2889a-7f80-4d65-8325-91355c9bcb46/33d2889a-7f80-4d65-8325-91355c9bcb46.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.893801] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5769dfec-42ff-48ce-8b8f-a3b59c5ac7cb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.908231] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Releasing lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.908538] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Instance network_info: |[{"id": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "address": "fa:16:3e:19:d3:74", "network": {"id": "2e5b2d81-dc13-418f-80bb-9289bafc6cf2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-520958173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c3e02b442a42d2865633bbe651bf76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71dea12-f0", "ovs_interfaceid": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1087.908948] env[61974]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:d3:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd71dea12-f0a5-480d-ad8c-3957db2708ee', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.916202] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Creating folder: Project (44c3e02b442a42d2865633bbe651bf76). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1087.917480] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee3831b2-a8e6-4286-b066-aed55d55e576 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.921742] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1087.921742] env[61974]: value = "task-1379481" [ 1087.921742] env[61974]: _type = "Task" [ 1087.921742] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.930157] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379481, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.931336] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Created folder: Project (44c3e02b442a42d2865633bbe651bf76) in parent group-v292912. [ 1087.931524] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Creating folder: Instances. Parent ref: group-v293032. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1087.931771] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e02c0f5-8040-4e20-a5ae-70e23dbd9056 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.949222] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Created folder: Instances in parent group-v293032. 
[ 1087.949520] env[61974]: DEBUG oslo.service.loopingcall [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.949735] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.949965] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45d685c3-084e-45dd-86b5-d0ea640265f8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.970920] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.970920] env[61974]: value = "task-1379484" [ 1087.970920] env[61974]: _type = "Task" [ 1087.970920] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.978039] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379484, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.086751] env[61974]: DEBUG nova.compute.manager [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Received event network-changed-d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1088.086963] env[61974]: DEBUG nova.compute.manager [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Refreshing instance network info cache due to event network-changed-d71dea12-f0a5-480d-ad8c-3957db2708ee. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1088.087203] env[61974]: DEBUG oslo_concurrency.lockutils [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] Acquiring lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.087354] env[61974]: DEBUG oslo_concurrency.lockutils [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] Acquired lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.087521] env[61974]: DEBUG nova.network.neutron [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Refreshing network info cache for port d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.157559] env[61974]: DEBUG nova.network.neutron [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [{"id": "e377f334-8d36-4f17-8532-abbd37c47eba", "address": "fa:16:3e:7f:63:47", "network": {"id": "a6e61508-1f16-48a9-a21d-2f9212fcf523", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1486161933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb6e7e7e52fc4aacaf5054732cd7d2df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ccbc7a-cf8d-4ea2-8411-291a1e27df7b", "external-id": "nsx-vlan-transportzone-998", "segmentation_id": 998, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape377f334-8d", "ovs_interfaceid": "e377f334-8d36-4f17-8532-abbd37c47eba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.161303] env[61974]: DEBUG nova.network.neutron [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.318974] env[61974]: DEBUG nova.compute.utils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1088.320415] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1088.320586] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1088.414344] env[61974]: DEBUG nova.policy [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d000dbe94f14f7296a630ae8c8f1353', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a1a2f7a8ac448ca8d5e0306eefb1d97', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1088.431907] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379481, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.480119] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379484, 'name': CreateVM_Task, 'duration_secs': 0.336374} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.480576] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.480984] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.481312] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.481508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1088.481758] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a543940-9acb-4e44-a789-c8cd2a73e897 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.486044] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1088.486044] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52830c41-9380-6017-4c23-dd0ee9feae13" [ 1088.486044] env[61974]: _type = "Task" [ 1088.486044] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.495349] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52830c41-9380-6017-4c23-dd0ee9feae13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.519162] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Successfully updated port: 5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.660556] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Releasing lock "refresh_cache-1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.662402] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8eee009-a44f-40a1-a4ca-7550e7782af5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.665419] env[61974]: INFO nova.compute.manager [-] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Took 1.22 seconds to deallocate network for instance. [ 1088.672239] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Resuming the VM {{(pid=61974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1088.672713] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df8c39eb-e09f-49d5-b1b3-59b0f09bd469 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.678689] env[61974]: DEBUG oslo_vmware.api [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1088.678689] env[61974]: value = "task-1379485" [ 1088.678689] env[61974]: _type = "Task" [ 1088.678689] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.686888] env[61974]: DEBUG oslo_vmware.api [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.824200] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1088.856762] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Successfully created port: 670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.932598] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379481, 'name': ReconfigVM_Task, 'duration_secs': 0.81246} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.933583] env[61974]: DEBUG nova.network.neutron [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Updated VIF entry in instance network info cache for port d71dea12-f0a5-480d-ad8c-3957db2708ee. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.933977] env[61974]: DEBUG nova.network.neutron [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Updating instance_info_cache with network_info: [{"id": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "address": "fa:16:3e:19:d3:74", "network": {"id": "2e5b2d81-dc13-418f-80bb-9289bafc6cf2", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-520958173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44c3e02b442a42d2865633bbe651bf76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71dea12-f0", "ovs_interfaceid": "d71dea12-f0a5-480d-ad8c-3957db2708ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.935277] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 33d2889a-7f80-4d65-8325-91355c9bcb46/33d2889a-7f80-4d65-8325-91355c9bcb46.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.935956] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df179a1b-33ec-4121-a729-87e7fce65c34 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.943480] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1088.943480] env[61974]: value = "task-1379486" [ 1088.943480] env[61974]: _type = "Task" [ 1088.943480] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.955104] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379486, 'name': Rename_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.995786] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52830c41-9380-6017-4c23-dd0ee9feae13, 'name': SearchDatastore_Task, 'duration_secs': 0.062368} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.996104] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.996430] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.996717] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.996879] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.997075] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.997338] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e762c75b-c434-461a-b45c-f8f15e01d994 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.007019] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.007019] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 
tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.007019] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65591fc2-7c23-493b-8c6b-f399c32fc5cb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.010721] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1089.010721] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52c536f9-3121-0471-93dc-6f428318dd64" [ 1089.010721] env[61974]: _type = "Task" [ 1089.010721] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.018465] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52c536f9-3121-0471-93dc-6f428318dd64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.023040] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.023176] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.023324] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.174439] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.174781] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.175025] env[61974]: DEBUG nova.objects.instance [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 
tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'resources' on Instance uuid 584ce365-9125-4c2a-9668-f921beb599e0 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.192323] env[61974]: DEBUG oslo_vmware.api [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379485, 'name': PowerOnVM_Task} progress is 93%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.439507] env[61974]: DEBUG oslo_concurrency.lockutils [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] Releasing lock "refresh_cache-18559ea6-0cc4-4201-bafa-e63868753a06" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.439800] env[61974]: DEBUG nova.compute.manager [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Received event network-vif-deleted-a2cdd452-355a-4078-ad38-aba9f8f7e20b {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1089.440051] env[61974]: INFO nova.compute.manager [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Neutron deleted interface a2cdd452-355a-4078-ad38-aba9f8f7e20b; detaching it from the instance and deleting it from the info cache [ 1089.440244] env[61974]: DEBUG nova.network.neutron [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.457799] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379486, 'name': Rename_Task, 'duration_secs': 0.151259} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.458159] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.458496] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e3884fa-31a9-4bda-97b8-0d82554b8629 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.467323] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1089.467323] env[61974]: value = "task-1379487" [ 1089.467323] env[61974]: _type = "Task" [ 1089.467323] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.480816] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379487, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.524019] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52c536f9-3121-0471-93dc-6f428318dd64, 'name': SearchDatastore_Task, 'duration_secs': 0.010698} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.524747] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ae888a-44b7-4036-ae25-bd25dba28cae {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.534815] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1089.534815] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52039dd3-05d0-6ca9-564b-89d39424ef82" [ 1089.534815] env[61974]: _type = "Task" [ 1089.534815] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.547023] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52039dd3-05d0-6ca9-564b-89d39424ef82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.576711] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1089.690945] env[61974]: DEBUG oslo_vmware.api [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379485, 'name': PowerOnVM_Task, 'duration_secs': 0.689053} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.693320] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Resumed the VM {{(pid=61974) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1089.693515] env[61974]: DEBUG nova.compute.manager [None req-f277d4b9-ff6d-481d-b4b4-39a37ce5ce8e tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1089.694344] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f49170-1385-4fb0-99aa-22cbe8c23664 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.731626] env[61974]: DEBUG nova.network.neutron [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Updating instance_info_cache with network_info: [{"id": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "address": "fa:16:3e:d3:7f:31", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a34a1d1-d7", "ovs_interfaceid": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.794470] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2794105e-59bb-41bd-ab8d-02b6ca180651 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.802718] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317d5a26-ae86-4030-9688-89feaa075f6a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.835824] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1089.838811] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41beda49-e3f5-457c-b8e6-72ba1d8532ea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.846922] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd8ad1a-c109-4510-97fd-12db60f0bfb6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.863024] env[61974]: DEBUG nova.compute.provider_tree [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.873296] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.873588] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.873794] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.874044] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.874251] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.874453] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.874752] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.874966] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.875853] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.875853] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.875853] env[61974]: DEBUG nova.virt.hardware [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.876531] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2193c987-6b07-453f-850d-290441ea74ba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.885479] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d5746d-5419-48ee-ae20-f42ed539e4d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.942714] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-490dcc10-8e19-4d38-be4e-4d9148ad1bd5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.952201] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2196931-7013-40be-8f3c-ce4de2cce055 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.980733] env[61974]: DEBUG nova.compute.manager [req-82662fe2-595f-4a0c-84ce-ec1408acb228 req-4c4a198c-9c34-4ccf-8175-83745a89b8b4 service nova] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Detach interface failed, port_id=a2cdd452-355a-4078-ad38-aba9f8f7e20b, reason: Instance 584ce365-9125-4c2a-9668-f921beb599e0 could not be found. 
{{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1089.984583] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379487, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.044328] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52039dd3-05d0-6ca9-564b-89d39424ef82, 'name': SearchDatastore_Task, 'duration_secs': 0.01362} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.044739] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.045028] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 18559ea6-0cc4-4201-bafa-e63868753a06/18559ea6-0cc4-4201-bafa-e63868753a06.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.045300] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7f3fedd-11b5-41ad-b72b-d3de5e8c3d05 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.051145] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1090.051145] env[61974]: value = "task-1379488" [ 1090.051145] env[61974]: _type = "Task" [ 1090.051145] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.058398] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379488, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.111959] env[61974]: DEBUG nova.compute.manager [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Received event network-vif-plugged-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1090.112235] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Acquiring lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.112466] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.112641] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.112813] env[61974]: DEBUG nova.compute.manager [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] No waiting events found dispatching network-vif-plugged-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1090.112984] env[61974]: WARNING nova.compute.manager [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Received unexpected event network-vif-plugged-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee for instance with vm_state building and task_state spawning. [ 1090.113536] env[61974]: DEBUG nova.compute.manager [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Received event network-changed-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1090.113822] env[61974]: DEBUG nova.compute.manager [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Refreshing instance network info cache due to event network-changed-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1090.114061] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Acquiring lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.236552] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.237552] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Instance network_info: |[{"id": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "address": "fa:16:3e:d3:7f:31", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a34a1d1-d7", "ovs_interfaceid": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1090.237552] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Acquired lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.237552] env[61974]: DEBUG nova.network.neutron [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Refreshing network info cache for port 5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.238652] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:7f:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee', 'vif_model': 'vmxnet3'}] 
{{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.247326] env[61974]: DEBUG oslo.service.loopingcall [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.248756] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.249036] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-353073c1-df16-45f3-bd16-537c9f918e5f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.269714] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.269714] env[61974]: value = "task-1379489" [ 1090.269714] env[61974]: _type = "Task" [ 1090.269714] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.277305] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379489, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.366687] env[61974]: DEBUG nova.scheduler.client.report [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.478822] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379487, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.489181] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Successfully updated port: 670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.561308] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379488, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.781384] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379489, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.872973] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.897489] env[61974]: INFO nova.scheduler.client.report [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance 584ce365-9125-4c2a-9668-f921beb599e0 [ 1090.979634] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379487, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.993524] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.993524] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.993524] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.027246] env[61974]: DEBUG nova.network.neutron [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Updated VIF entry in instance network info cache for port 5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1091.027363] env[61974]: DEBUG nova.network.neutron [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Updating instance_info_cache with network_info: [{"id": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "address": "fa:16:3e:d3:7f:31", "network": {"id": "bbb72d46-05ed-4ca3-80a8-0e9b6e6ccb5d", "bridge": "br-int", "label": "tempest-ServersTestJSON-148366285-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4757d24b61794cfcaefff2ad44e02b74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a34a1d1-d7", "ovs_interfaceid": "5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.062721] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.925265} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.063027] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 18559ea6-0cc4-4201-bafa-e63868753a06/18559ea6-0cc4-4201-bafa-e63868753a06.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.063258] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.063518] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-385f095b-e822-4b3b-8a9b-2a6b3062257b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.070922] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1091.070922] env[61974]: value = "task-1379490" [ 1091.070922] env[61974]: _type = "Task" [ 1091.070922] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.079214] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379490, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.281038] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379489, 'name': CreateVM_Task, 'duration_secs': 0.579319} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.281038] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.281222] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.281411] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.281736] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.281993] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81635839-cb6e-4180-8e28-bb4193a9bc71 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.286313] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1091.286313] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529d8bea-16e1-9125-b662-9f9465b055e6" [ 1091.286313] env[61974]: _type = "Task" [ 1091.286313] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.293712] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529d8bea-16e1-9125-b662-9f9465b055e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.406587] env[61974]: DEBUG oslo_concurrency.lockutils [None req-46f06222-ebaf-428c-bba1-56eefb7347a3 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "584ce365-9125-4c2a-9668-f921beb599e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.566s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.478902] env[61974]: DEBUG oslo_vmware.api [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379487, 'name': PowerOnVM_Task, 'duration_secs': 1.995822} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.480254] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1091.480525] env[61974]: INFO nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Took 10.58 seconds to spawn the instance on the hypervisor. [ 1091.480719] env[61974]: DEBUG nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1091.481543] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d634f02-e99f-485f-a2df-ae57122de535 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.527787] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.531891] env[61974]: DEBUG oslo_concurrency.lockutils [req-ec0967cf-a608-44b3-a83b-f6673bd163d8 req-ac028492-e0f4-4b9d-ad15-5aec2a94133b service nova] Releasing lock "refresh_cache-14a74bf6-712b-4b82-a24f-6367d5180c6a" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.580143] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379490, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102161} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.580453] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.581227] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c266c3-9ce5-4b4f-a34b-164569214e13 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.606046] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 18559ea6-0cc4-4201-bafa-e63868753a06/18559ea6-0cc4-4201-bafa-e63868753a06.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.606374] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-248cf925-d80f-4fb4-9263-279a75398146 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.625262] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1091.625262] env[61974]: value = "task-1379491" [ 1091.625262] env[61974]: _type = "Task" [ 1091.625262] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.633834] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379491, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.693948] env[61974]: DEBUG nova.network.neutron [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating instance_info_cache with network_info: [{"id": "670c53d8-5b5e-412b-9af9-48b50c98a404", "address": "fa:16:3e:85:3f:76", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap670c53d8-5b", "ovs_interfaceid": "670c53d8-5b5e-412b-9af9-48b50c98a404", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.796636] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529d8bea-16e1-9125-b662-9f9465b055e6, 'name': SearchDatastore_Task, 'duration_secs': 0.036524} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.796976] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.797238] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.797495] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.797631] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.797818] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.798115] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b75ec2b0-37cb-48da-9a48-c26ec2d86fe3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.812909] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.813133] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.813889] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-394c82f3-929b-40fa-a785-4ffde8a04415 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.819644] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1091.819644] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]522de5ff-f5d7-798e-5e47-4007950ff838" [ 1091.819644] env[61974]: _type = "Task" [ 1091.819644] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.828015] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522de5ff-f5d7-798e-5e47-4007950ff838, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.998445] env[61974]: INFO nova.compute.manager [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Took 17.04 seconds to build instance. [ 1092.135882] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379491, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.196936] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.197312] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Instance network_info: |[{"id": "670c53d8-5b5e-412b-9af9-48b50c98a404", "address": "fa:16:3e:85:3f:76", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap670c53d8-5b", "ovs_interfaceid": "670c53d8-5b5e-412b-9af9-48b50c98a404", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1092.197782] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:3f:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '670c53d8-5b5e-412b-9af9-48b50c98a404', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.205847] env[61974]: DEBUG oslo.service.loopingcall [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.206099] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.206392] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30ac23b9-28f3-4d49-85d2-e2e51ccdac97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.226351] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.226351] env[61974]: value = "task-1379492" [ 1092.226351] env[61974]: _type = "Task" [ 1092.226351] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.231598] env[61974]: DEBUG nova.compute.manager [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Received event network-vif-plugged-670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.231837] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.232058] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.232238] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.232412] env[61974]: DEBUG nova.compute.manager [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] No waiting events found dispatching network-vif-plugged-670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1092.232581] env[61974]: WARNING nova.compute.manager [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Received unexpected event network-vif-plugged-670c53d8-5b5e-412b-9af9-48b50c98a404 for instance with vm_state building and task_state spawning. 
[ 1092.232750] env[61974]: DEBUG nova.compute.manager [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Received event network-changed-670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.232909] env[61974]: DEBUG nova.compute.manager [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Refreshing instance network info cache due to event network-changed-670c53d8-5b5e-412b-9af9-48b50c98a404. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1092.233100] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Acquiring lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.233250] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Acquired lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.233411] env[61974]: DEBUG nova.network.neutron [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Refreshing network info cache for port 670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.241788] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379492, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.331975] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]522de5ff-f5d7-798e-5e47-4007950ff838, 'name': SearchDatastore_Task, 'duration_secs': 0.011779} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.332862] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b928306-5e1c-443d-8a7e-92b8a4bb74d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.338226] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1092.338226] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d8ccb1-dd33-a96c-6964-0e05368d2348" [ 1092.338226] env[61974]: _type = "Task" [ 1092.338226] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.345777] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d8ccb1-dd33-a96c-6964-0e05368d2348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.501053] env[61974]: DEBUG oslo_concurrency.lockutils [None req-0a5eef4c-3a93-4102-b041-88928ca2b328 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.550s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.636243] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379491, 'name': ReconfigVM_Task, 'duration_secs': 0.971461} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.636530] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 18559ea6-0cc4-4201-bafa-e63868753a06/18559ea6-0cc4-4201-bafa-e63868753a06.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.637216] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecdb5b48-7c2f-4310-aca0-b37f62d2fa38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.643825] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1092.643825] env[61974]: value = "task-1379493" [ 1092.643825] env[61974]: _type = "Task" [ 1092.643825] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.652715] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379493, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.738057] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379492, 'name': CreateVM_Task, 'duration_secs': 0.392702} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.739139] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.739533] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.739713] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.740106] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1092.740377] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6ea959a-e515-430d-895a-66c84df2b212 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.745514] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1092.745514] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5225ab15-17b3-9394-911d-b619a83eaf4b" [ 1092.745514] env[61974]: _type = "Task" [ 1092.745514] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.754935] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5225ab15-17b3-9394-911d-b619a83eaf4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.820376] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.821038] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.849892] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d8ccb1-dd33-a96c-6964-0e05368d2348, 'name': SearchDatastore_Task, 'duration_secs': 0.011679} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.850226] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.850516] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 14a74bf6-712b-4b82-a24f-6367d5180c6a/14a74bf6-712b-4b82-a24f-6367d5180c6a.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.850822] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4943592-1275-4e33-b43d-f2fc232f1292 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.859845] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1092.859845] env[61974]: value = "task-1379494" [ 1092.859845] env[61974]: _type = "Task" [ 1092.859845] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.867986] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379494, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.031393] env[61974]: DEBUG nova.network.neutron [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updated VIF entry in instance network info cache for port 670c53d8-5b5e-412b-9af9-48b50c98a404. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1093.031765] env[61974]: DEBUG nova.network.neutron [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating instance_info_cache with network_info: [{"id": "670c53d8-5b5e-412b-9af9-48b50c98a404", "address": "fa:16:3e:85:3f:76", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap670c53d8-5b", "ovs_interfaceid": "670c53d8-5b5e-412b-9af9-48b50c98a404", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.154109] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379493, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.256254] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5225ab15-17b3-9394-911d-b619a83eaf4b, 'name': SearchDatastore_Task, 'duration_secs': 0.014324} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.256539] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.256780] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.257037] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.257199] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.257388] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.257649] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dacfbef1-7d3f-4c7f-a468-5ab103e6e063 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.265366] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.265550] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.266241] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548a3820-b8f0-4d41-a1b6-4c6d6d15fc35 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.270866] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1093.270866] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52679a38-5a04-421d-14da-d042d4cfe31f" [ 1093.270866] env[61974]: _type = "Task" [ 1093.270866] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.277858] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52679a38-5a04-421d-14da-d042d4cfe31f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.323472] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1093.369558] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379494, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.534912] env[61974]: DEBUG oslo_concurrency.lockutils [req-bdd26a02-a4d2-4c90-80d2-a994d1cbf958 req-577355da-8116-4b20-b379-82dea58f929b service nova] Releasing lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.566070] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "33d2889a-7f80-4d65-8325-91355c9bcb46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.566509] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.566875] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.567179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.567468] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.570112] env[61974]: INFO nova.compute.manager [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Terminating instance [ 1093.573094] env[61974]: DEBUG nova.compute.manager [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1093.573391] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1093.574556] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8509474b-fec0-4ea9-9c5c-3a66c2dfc513 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.584310] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.584539] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d3dfeaa-e9d1-4533-891a-5f98f427e7e7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.591959] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1093.591959] env[61974]: value = "task-1379495" [ 1093.591959] env[61974]: _type = "Task" [ 1093.591959] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.603200] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.655367] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379493, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.782423] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52679a38-5a04-421d-14da-d042d4cfe31f, 'name': SearchDatastore_Task, 'duration_secs': 0.009031} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.783461] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cca3930-8ae5-4f94-a9fa-e49851f22716 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.790258] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1093.790258] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]520d1a5a-8808-29c8-92e7-958347b5240a" [ 1093.790258] env[61974]: _type = "Task" [ 1093.790258] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.800952] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520d1a5a-8808-29c8-92e7-958347b5240a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.851852] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.852184] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.853808] env[61974]: INFO nova.compute.claims [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.869895] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.98137} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.870214] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 14a74bf6-712b-4b82-a24f-6367d5180c6a/14a74bf6-712b-4b82-a24f-6367d5180c6a.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.870466] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.870736] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d5c1d3e-beec-4b60-b4af-79c0c0264717 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.877743] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1093.877743] env[61974]: value = "task-1379496" [ 1093.877743] env[61974]: _type = "Task" [ 1093.877743] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.886189] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379496, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.101610] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379495, 'name': PowerOffVM_Task, 'duration_secs': 0.380404} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.101791] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1094.101899] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1094.102186] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-901c1619-54fd-4a6b-8199-8d6f7173ee78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.154946] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379493, 'name': Rename_Task, 'duration_secs': 1.147296} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.155126] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.155254] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e342262-9018-4718-b372-ef16d04d6d7e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.161642] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1094.161642] env[61974]: value = "task-1379498" [ 1094.161642] env[61974]: _type = "Task" [ 1094.161642] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.169343] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379498, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.196183] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1094.196415] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1094.196603] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleting the datastore file [datastore2] 33d2889a-7f80-4d65-8325-91355c9bcb46 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1094.196879] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f61674c0-ca4d-4f93-b795-945f280c66cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.203637] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for the task: (returnval){ [ 1094.203637] env[61974]: value = "task-1379499" [ 1094.203637] env[61974]: _type = "Task" [ 1094.203637] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.212760] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379499, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.300914] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]520d1a5a-8808-29c8-92e7-958347b5240a, 'name': SearchDatastore_Task, 'duration_secs': 0.038345} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.301778] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.302129] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93409fd8-c9aa-427b-94b2-93f3db982786/93409fd8-c9aa-427b-94b2-93f3db982786.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.302457] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bc59e99-c90e-443a-9385-8e643d866dd4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.309709] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1094.309709] env[61974]: value = "task-1379500" [ 1094.309709] env[61974]: _type = "Task" [ 1094.309709] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.317926] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.387591] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095539} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.387880] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1094.388682] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437681e1-190e-4ee5-b0c3-c29332a4545e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.412532] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 14a74bf6-712b-4b82-a24f-6367d5180c6a/14a74bf6-712b-4b82-a24f-6367d5180c6a.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.412848] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b1d78d7-bd79-483a-bc42-20efa1f50e85 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.432499] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1094.432499] env[61974]: value = "task-1379501" [ 1094.432499] env[61974]: _type = "Task" [ 1094.432499] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.441594] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.674194] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379498, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.715877] env[61974]: DEBUG oslo_vmware.api [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Task: {'id': task-1379499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245201} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.716236] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1094.716489] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1094.716749] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1094.716960] env[61974]: INFO nova.compute.manager [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1094.717305] env[61974]: DEBUG oslo.service.loopingcall [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1094.717575] env[61974]: DEBUG nova.compute.manager [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1094.717738] env[61974]: DEBUG nova.network.neutron [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1094.820298] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379500, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.945623] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379501, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.987908] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f857733d-aed5-40dc-be34-4dc11321e164 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.997856] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b811a5b-3525-4060-827f-4d92c70032b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.006789] env[61974]: DEBUG nova.compute.manager [req-abca13f3-9834-4b96-9b7b-190110f59a73 req-72e3c188-2729-4419-85f6-c6f0d8c15ef6 service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Received event network-vif-deleted-2da2f869-4db9-419c-9731-3cb2382bb153 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1095.007031] env[61974]: INFO nova.compute.manager [req-abca13f3-9834-4b96-9b7b-190110f59a73 req-72e3c188-2729-4419-85f6-c6f0d8c15ef6 service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Neutron deleted interface 2da2f869-4db9-419c-9731-3cb2382bb153; detaching it from the instance and deleting it from the info cache [ 1095.007241] env[61974]: DEBUG nova.network.neutron [req-abca13f3-9834-4b96-9b7b-190110f59a73 req-72e3c188-2729-4419-85f6-c6f0d8c15ef6 service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.035704] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f478764a-3077-4577-abb0-cdeb0e65ea21 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.038188] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-951664cb-dc02-45a7-9484-d55e238bc99a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.046063] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fad82b-2d7d-4966-b997-cde30668ff7d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.052782] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b343adc-bed6-44ad-bff8-86212dd95401 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.071958] env[61974]: DEBUG nova.compute.provider_tree [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.081327] env[61974]: DEBUG nova.compute.manager [req-abca13f3-9834-4b96-9b7b-190110f59a73 req-72e3c188-2729-4419-85f6-c6f0d8c15ef6 service nova] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Detach interface failed, port_id=2da2f869-4db9-419c-9731-3cb2382bb153, reason: Instance 33d2889a-7f80-4d65-8325-91355c9bcb46 could not be found. 
{{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1095.082363] env[61974]: DEBUG nova.scheduler.client.report [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.174077] env[61974]: DEBUG oslo_vmware.api [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379498, 'name': PowerOnVM_Task, 'duration_secs': 0.677437} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.174401] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.174612] env[61974]: INFO nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Took 9.83 seconds to spawn the instance on the hypervisor. 
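(Annotation, not part of the captured log: the recurring "Waiting for the task ... progress is N% ... completed successfully" records above are oslo.vmware's task polling, wait_for_task / _poll_task in oslo_vmware/api.py. A minimal sketch of that pattern is below; the vCenter host, credentials, and VM moref value are placeholders, and the calls use the stock oslo.vmware session API as commonly documented, not code taken from this deployment.)

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; in this log the session is created by
# the Nova VMware driver at service start-up.
session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed-object reference for an existing VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api issues the SOAP call (here PowerOnVM_Task) and returns a task
# reference; wait_for_task polls it, producing "progress is N%" debug lines
# like the ones above, until the task succeeds or raises on a vCenter fault.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)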
[ 1095.174799] env[61974]: DEBUG nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1095.175683] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361ec43f-6da3-4707-b72a-8a135ae91c64 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.179333] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.179572] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.179792] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.179992] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.180218] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.185867] env[61974]: INFO nova.compute.manager [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Terminating instance [ 1095.187618] env[61974]: DEBUG nova.compute.manager [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1095.187823] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.188610] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ebfc87-135f-4734-a313-ca9475a38d6d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.195640] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.195883] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af8b93e9-aca9-4929-a8bf-fd536797cf78 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.201881] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1095.201881] env[61974]: value = "task-1379502" [ 1095.201881] env[61974]: _type = "Task" [ 1095.201881] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.209414] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.320352] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379500, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.446061] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379501, 'name': ReconfigVM_Task, 'duration_secs': 0.607053} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.446061] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 14a74bf6-712b-4b82-a24f-6367d5180c6a/14a74bf6-712b-4b82-a24f-6367d5180c6a.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.446229] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af4b344e-eec4-4420-90e5-4b34e513e129 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.452791] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1095.452791] env[61974]: value = "task-1379503" [ 1095.452791] env[61974]: _type = "Task" [ 1095.452791] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.464465] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379503, 'name': Rename_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.482237] env[61974]: DEBUG nova.network.neutron [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.588219] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.736s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.588796] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1095.695633] env[61974]: INFO nova.compute.manager [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Took 15.79 seconds to build instance. [ 1095.712729] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379502, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.823172] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379500, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.483099} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.823502] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 93409fd8-c9aa-427b-94b2-93f3db982786/93409fd8-c9aa-427b-94b2-93f3db982786.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.823731] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.824048] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b336e880-24e1-4bbf-9c59-5ceecb95b05f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.832680] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1095.832680] env[61974]: value = "task-1379504" [ 1095.832680] env[61974]: _type = "Task" [ 1095.832680] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.841801] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.963306] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379503, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.984596] env[61974]: INFO nova.compute.manager [-] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Took 1.27 seconds to deallocate network for instance. 
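(Annotation, not part of the captured log: the CopyVirtualDisk_Task followed by ExtendVirtualDisk_Task sequence above is the driver copying the cached image VMDK into the instance directory and growing that copy to the flavor's root-disk size, 1048576 KB in this log. A condensed sketch of those two steps through oslo.vmware follows; the datastore paths are placeholders, and Nova additionally passes a datacenter moref that is omitted here.)

from oslo_vmware import api

session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
vdm = session.vim.service_content.virtualDiskManager

# Copy the cached image VMDK to the instance directory; the call returns a
# vCenter task that is polled to completion, as in the log above.
copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', vdm,
    sourceName='[datastore2] devstack-image-cache_base/image.vmdk',
    destName='[datastore2] instance-uuid/instance-uuid.vmdk')
session.wait_for_task(copy_task)

# Extend the copied root disk to the requested capacity in KB.
extend_task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', vdm,
    name='[datastore2] instance-uuid/instance-uuid.vmdk',
    newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(extend_task)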
[ 1096.094287] env[61974]: DEBUG nova.compute.utils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1096.095694] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1096.095862] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1096.133850] env[61974]: DEBUG nova.policy [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fb3973c32a645fb82106b90ee5e33a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd104a741ebad47748ae5646356589fce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1096.198254] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1de2faad-6f40-4a04-97b8-dbf4cdacb45e tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.301s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.211829] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379502, 'name': PowerOffVM_Task, 'duration_secs': 0.831961} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.212118] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.212299] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.212685] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8862477d-353a-4eb1-9b7c-84e6a0cded2d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.344975] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071403} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.344975] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.345639] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6875b99c-7c4f-4673-9302-4bbb01519404 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.368545] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 93409fd8-c9aa-427b-94b2-93f3db982786/93409fd8-c9aa-427b-94b2-93f3db982786.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.369217] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e710aa0e-de48-465d-bc59-d3efeb344f3f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.392657] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1096.392657] env[61974]: value = "task-1379506" [ 1096.392657] env[61974]: _type = "Task" [ 1096.392657] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.403631] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379506, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.464030] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379503, 'name': Rename_Task, 'duration_secs': 0.525524} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.464222] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.464452] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1e22186-26b9-4e6f-b8ba-4810b3658883 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.472854] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1096.472854] env[61974]: value = "task-1379507" [ 1096.472854] env[61974]: _type = "Task" [ 1096.472854] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.479189] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Successfully created port: b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.486733] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379507, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.492000] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.492390] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.492661] env[61974]: DEBUG nova.objects.instance [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lazy-loading 'resources' on Instance uuid 33d2889a-7f80-4d65-8325-91355c9bcb46 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.598659] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1096.903059] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379506, 'name': ReconfigVM_Task, 'duration_secs': 0.316215} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.903320] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 93409fd8-c9aa-427b-94b2-93f3db982786/93409fd8-c9aa-427b-94b2-93f3db982786.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.903952] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bccde519-3672-414c-bd8e-13ea8ccdaf62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.911109] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1096.911109] env[61974]: value = "task-1379508" [ 1096.911109] env[61974]: _type = "Task" [ 1096.911109] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.919155] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379508, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.983249] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379507, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.139537] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba1cb52-7817-4828-9bfa-e9b4cf16a035 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.147381] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd007d4-5bd3-47ad-b7c0-4956bfd958f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.178398] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d560ea2d-c07e-4f9b-9a87-53bd52b33541 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.186504] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829bd483-5fdc-436f-9512-99e50be90606 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.202350] env[61974]: DEBUG nova.compute.provider_tree [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.237596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "18559ea6-0cc4-4201-bafa-e63868753a06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.237865] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.238116] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.238323] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.238499] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.240940] env[61974]: INFO nova.compute.manager [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Terminating instance [ 1097.242799] env[61974]: DEBUG nova.compute.manager [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1097.242994] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.243819] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedf15b9-cce5-404c-953f-165d25db1115 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.252451] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.252688] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4c4251a-7b74-4f5d-b9d4-93caf14895e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.259311] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1097.259311] env[61974]: value = "task-1379509" [ 1097.259311] env[61974]: _type = "Task" [ 1097.259311] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.267617] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.422574] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379508, 'name': Rename_Task, 'duration_secs': 0.144144} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.422735] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.423035] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-769d0e2f-eaac-4293-a0ff-80f971b5a771 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.430652] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1097.430652] env[61974]: value = "task-1379510" [ 1097.430652] env[61974]: _type = "Task" [ 1097.430652] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.440407] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.483320] env[61974]: DEBUG oslo_vmware.api [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379507, 'name': PowerOnVM_Task, 'duration_secs': 0.66769} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.483650] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.483795] env[61974]: INFO nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Took 9.89 seconds to spawn the instance on the hypervisor. [ 1097.483980] env[61974]: DEBUG nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1097.484807] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0514f8-3887-42e5-9ba9-6a35f2cb3b46 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.608993] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1097.638209] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1097.638497] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.638715] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1097.638939] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.639109] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1097.639267] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1097.639481] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1097.639646] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1097.639822] env[61974]: DEBUG nova.virt.hardware [None 
req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1097.640050] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1097.640782] env[61974]: DEBUG nova.virt.hardware [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1097.641338] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cc0b8d-23a6-421a-ad0a-3128acb4cd54 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.652081] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b6cf4f-d5e9-4db9-a729-b41140d7b4cc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.705948] env[61974]: DEBUG nova.scheduler.client.report [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1097.770909] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379509, 'name': PowerOffVM_Task, 'duration_secs': 0.263281} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.771215] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.771375] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.771654] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54f6959b-d9fb-4fa3-839e-144da13f00c1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.854578] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.854897] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.856033] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleting the datastore file [datastore1] 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.856033] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eec13ff3-e1b2-40bb-b9ff-089c002c766e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.862577] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.862726] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.864024] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 
tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Deleting the datastore file [datastore2] 18559ea6-0cc4-4201-bafa-e63868753a06 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.864024] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9a04740-cfcb-4300-9894-3048f813ee0f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.867035] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for the task: (returnval){ [ 1097.867035] env[61974]: value = "task-1379512" [ 1097.867035] env[61974]: _type = "Task" [ 1097.867035] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.872331] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for the task: (returnval){ [ 1097.872331] env[61974]: value = "task-1379513" [ 1097.872331] env[61974]: _type = "Task" [ 1097.872331] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.881928] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.888526] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.942217] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379510, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.944687] env[61974]: DEBUG nova.compute.manager [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Received event network-vif-plugged-b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1097.944930] env[61974]: DEBUG oslo_concurrency.lockutils [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.945195] env[61974]: DEBUG oslo_concurrency.lockutils [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] Lock "3b0762f6-2419-491e-8929-835853a320af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.945417] env[61974]: DEBUG oslo_concurrency.lockutils [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] Lock "3b0762f6-2419-491e-8929-835853a320af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.945638] env[61974]: DEBUG nova.compute.manager [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] No waiting events found dispatching network-vif-plugged-b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1097.945850] env[61974]: WARNING nova.compute.manager [req-a71ed332-41ef-4c5d-b226-6b517a1293e7 req-c324b29f-ae35-4889-84a8-3f0fbe802e4b service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Received unexpected event network-vif-plugged-b67804ff-e7c2-42a2-9b55-3965ed99c857 for instance with vm_state building and task_state spawning. [ 1098.003456] env[61974]: INFO nova.compute.manager [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Took 17.87 seconds to build instance. 
[ 1098.045838] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Successfully updated port: b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.212679] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.230434] env[61974]: INFO nova.scheduler.client.report [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Deleted allocations for instance 33d2889a-7f80-4d65-8325-91355c9bcb46 [ 1098.380050] env[61974]: DEBUG oslo_vmware.api [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Task: {'id': task-1379512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17656} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.380671] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.380873] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.381073] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.381288] env[61974]: INFO nova.compute.manager [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Took 3.19 seconds to destroy the instance on the hypervisor. [ 1098.381549] env[61974]: DEBUG oslo.service.loopingcall [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.381756] env[61974]: DEBUG nova.compute.manager [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1098.381853] env[61974]: DEBUG nova.network.neutron [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.386088] env[61974]: DEBUG oslo_vmware.api [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Task: {'id': task-1379513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157657} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.386613] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.386805] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.386990] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.387186] env[61974]: INFO nova.compute.manager [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1098.387410] env[61974]: DEBUG oslo.service.loopingcall [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.387594] env[61974]: DEBUG nova.compute.manager [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1098.387686] env[61974]: DEBUG nova.network.neutron [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.442831] env[61974]: DEBUG oslo_vmware.api [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379510, 'name': PowerOnVM_Task, 'duration_secs': 0.686935} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.443054] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1098.443268] env[61974]: INFO nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Took 8.61 seconds to spawn the instance on the hypervisor. [ 1098.443454] env[61974]: DEBUG nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1098.444269] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c160f614-c96b-4daa-8146-8cbc7da23123 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.505953] env[61974]: DEBUG oslo_concurrency.lockutils [None req-78e265e4-d06d-4883-806b-3e8f284b2b8f tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.382s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.548408] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.548551] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.548703] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 
tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.738237] env[61974]: DEBUG oslo_concurrency.lockutils [None req-7d09d835-1493-4ad1-93ba-12625d05e9e5 tempest-ServerDiskConfigTestJSON-944006047 tempest-ServerDiskConfigTestJSON-944006047-project-member] Lock "33d2889a-7f80-4d65-8325-91355c9bcb46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.172s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.963435] env[61974]: INFO nova.compute.manager [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Took 13.64 seconds to build instance. [ 1099.082725] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.295346] env[61974]: DEBUG nova.network.neutron [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.299674] env[61974]: DEBUG nova.network.neutron [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.471592] env[61974]: DEBUG oslo_concurrency.lockutils [None req-138c541f-ab8f-4188-b533-c792bcc459dd tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.154s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.489202] env[61974]: DEBUG nova.network.neutron [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", 
"segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.587244] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.587550] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.587744] env[61974]: DEBUG nova.compute.manager [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1099.588722] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31457950-a4f1-40ac-a7ee-ac94ca248000 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.596659] env[61974]: DEBUG nova.compute.manager [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61974) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1099.597254] env[61974]: DEBUG nova.objects.instance [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lazy-loading 'flavor' on Instance uuid 14a74bf6-712b-4b82-a24f-6367d5180c6a {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.798839] env[61974]: INFO nova.compute.manager [-] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Took 1.42 seconds to deallocate network for instance. [ 1099.801151] env[61974]: INFO nova.compute.manager [-] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Took 1.41 seconds to deallocate network for instance. 
[ 1099.993875] env[61974]: DEBUG nova.compute.manager [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Received event network-changed-b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1099.994112] env[61974]: DEBUG nova.compute.manager [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Refreshing instance network info cache due to event network-changed-b67804ff-e7c2-42a2-9b55-3965ed99c857. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1099.994321] env[61974]: DEBUG oslo_concurrency.lockutils [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] Acquiring lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.995460] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.995793] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Instance network_info: |[{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1099.996381] env[61974]: DEBUG oslo_concurrency.lockutils [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] Acquired lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.996574] env[61974]: DEBUG nova.network.neutron [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Refreshing network info cache for 
port b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.001022] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:6e:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b67804ff-e7c2-42a2-9b55-3965ed99c857', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.005384] env[61974]: DEBUG oslo.service.loopingcall [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.006637] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.006881] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4732c8a8-c6d0-4ae0-b45b-1e1c4286fb17 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.031406] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.031406] env[61974]: value = "task-1379514" [ 1100.031406] env[61974]: _type = "Task" [ 1100.031406] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.041757] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379514, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.102196] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1100.102481] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-567eb92e-947b-41d0-b86e-2b9cddd48f80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.112456] env[61974]: DEBUG oslo_vmware.api [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1100.112456] env[61974]: value = "task-1379515" [ 1100.112456] env[61974]: _type = "Task" [ 1100.112456] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.127819] env[61974]: DEBUG oslo_vmware.api [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379515, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.307832] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.308291] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.308657] env[61974]: DEBUG nova.objects.instance [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lazy-loading 'resources' on Instance uuid 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.310963] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.543422] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379514, 'name': CreateVM_Task} progress is 25%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.626093] env[61974]: DEBUG oslo_vmware.api [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379515, 'name': PowerOffVM_Task, 'duration_secs': 0.329693} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.629994] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.630458] env[61974]: DEBUG nova.compute.manager [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1100.631554] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588abefe-2c5b-403b-bd69-3429fa6dfb5c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.825352] env[61974]: DEBUG nova.network.neutron [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updated VIF entry in instance network info cache for port b67804ff-e7c2-42a2-9b55-3965ed99c857. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1100.825352] env[61974]: DEBUG nova.network.neutron [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.946659] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c9dab8-a477-4077-b6dc-60e8aa3e591c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.955253] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb54f2fe-45bb-424b-89c4-2dcd25a56a0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.992198] env[61974]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c828eb92-bcb8-4e0c-823a-6811717a9b4a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.001023] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1da043-f82c-4cc6-859c-af900215ec47 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.018754] env[61974]: DEBUG nova.compute.provider_tree [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.044689] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379514, 'name': CreateVM_Task, 'duration_secs': 0.645987} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.044821] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.045508] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.045770] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.046182] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1101.047183] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc7c078a-41ec-4ce3-b4bd-0a073af9448c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.053355] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1101.053355] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52062714-0a4f-1fd0-e949-52f5414624fb" [ 1101.053355] env[61974]: _type = "Task" [ 1101.053355] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.064339] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52062714-0a4f-1fd0-e949-52f5414624fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.147674] env[61974]: DEBUG oslo_concurrency.lockutils [None req-5ac391c2-a3a3-4eb5-9091-1ab98fdab545 tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.560s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.329367] env[61974]: DEBUG oslo_concurrency.lockutils [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] Releasing lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.330057] env[61974]: DEBUG nova.compute.manager [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Received event network-vif-deleted-e377f334-8d36-4f17-8532-abbd37c47eba {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1101.330440] env[61974]: DEBUG nova.compute.manager [req-107daead-7789-4400-a817-cb4c6550a51e req-48f32892-3e1d-43e4-be6a-04c5b2f9e815 service nova] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Received event network-vif-deleted-d71dea12-f0a5-480d-ad8c-3957db2708ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1101.521937] env[61974]: DEBUG nova.scheduler.client.report [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.565144] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52062714-0a4f-1fd0-e949-52f5414624fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010176} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.565709] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.566054] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.566467] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.566655] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.566935] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.567357] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1de65849-9f74-42ce-93f9-9eb9e09a6aa7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.578886] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.578886] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.578886] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0daf61c2-594a-41c5-86b9-645f7ba194dc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.584763] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1101.584763] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]526e9dcd-7d66-d6f9-f3e2-11e523d42473" [ 1101.584763] env[61974]: _type = "Task" [ 1101.584763] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.594380] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]526e9dcd-7d66-d6f9-f3e2-11e523d42473, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.019055] env[61974]: DEBUG nova.compute.manager [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Received event network-changed-670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1102.019294] env[61974]: DEBUG nova.compute.manager [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Refreshing instance network info cache due to event network-changed-670c53d8-5b5e-412b-9af9-48b50c98a404. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1102.019512] env[61974]: DEBUG oslo_concurrency.lockutils [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] Acquiring lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.020153] env[61974]: DEBUG oslo_concurrency.lockutils [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] Acquired lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.020489] env[61974]: DEBUG nova.network.neutron [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Refreshing network info cache for port 670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.027698] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.029400] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.719s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.029637] env[61974]: DEBUG nova.objects.instance [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lazy-loading 'resources' on Instance uuid 18559ea6-0cc4-4201-bafa-e63868753a06 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.051579] env[61974]: INFO nova.scheduler.client.report [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Deleted allocations for instance 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb [ 1102.095842] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]526e9dcd-7d66-d6f9-f3e2-11e523d42473, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.097372] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ceb2434-3c3f-4d08-b6b0-585b34d96979 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.102219] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1102.102219] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52914c6a-8d12-ad46-98dc-3415d5469d4f" [ 1102.102219] env[61974]: _type = "Task" [ 1102.102219] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.110388] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52914c6a-8d12-ad46-98dc-3415d5469d4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.564074] env[61974]: DEBUG oslo_concurrency.lockutils [None req-90893b9d-c9ed-4a50-93cb-8dcc4939090b tempest-ServersNegativeTestJSON-1068579309 tempest-ServersNegativeTestJSON-1068579309-project-member] Lock "1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.384s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.613894] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52914c6a-8d12-ad46-98dc-3415d5469d4f, 'name': SearchDatastore_Task, 'duration_secs': 0.024579} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.616488] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.616756] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.617476] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c98eb4c-6da1-48a4-a8fb-2e3755e149cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.624102] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1102.624102] env[61974]: value = "task-1379516" [ 1102.624102] env[61974]: _type = "Task" [ 1102.624102] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.634232] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379516, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.646224] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.646474] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.646683] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.646871] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.647135] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.650882] env[61974]: INFO nova.compute.manager [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Terminating instance [ 1102.653038] env[61974]: DEBUG nova.compute.manager [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1102.653252] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.654350] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12f144c-f2fe-46a6-be1b-e0ded85005b5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.662041] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1102.664859] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b505bca-9d67-4423-91d1-dbf5aa46fb0b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.677793] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0335a243-c2bb-4be8-b2a2-493839236db3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.684632] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef5f85-dd35-4787-a731-f0fc8cbf476e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.720016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65e0fec-95a1-4deb-80bb-fb77e742963b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.728056] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f67810-03dd-43c3-87d6-35f5a248abce {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.742334] env[61974]: DEBUG nova.compute.provider_tree [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.756148] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1102.756148] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1102.756418] env[61974]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleting the datastore file [datastore2] 14a74bf6-712b-4b82-a24f-6367d5180c6a {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1102.756904] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66de35fe-a519-4f9b-b2d8-7587170f2c9a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.767130] env[61974]: DEBUG oslo_vmware.api [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for the task: (returnval){ [ 1102.767130] env[61974]: value = "task-1379518" [ 1102.767130] env[61974]: _type = "Task" [ 1102.767130] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.775979] env[61974]: DEBUG oslo_vmware.api [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.909386] env[61974]: DEBUG nova.network.neutron [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updated VIF entry in instance network info cache for port 670c53d8-5b5e-412b-9af9-48b50c98a404. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.909816] env[61974]: DEBUG nova.network.neutron [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating instance_info_cache with network_info: [{"id": "670c53d8-5b5e-412b-9af9-48b50c98a404", "address": "fa:16:3e:85:3f:76", "network": {"id": "39aeb319-53cd-43fa-bc5e-cb665f2e4707", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2053105632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a1a2f7a8ac448ca8d5e0306eefb1d97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap670c53d8-5b", "ovs_interfaceid": "670c53d8-5b5e-412b-9af9-48b50c98a404", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.135932] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379516, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.245427] env[61974]: DEBUG nova.scheduler.client.report [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1103.278350] env[61974]: DEBUG oslo_vmware.api [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Task: {'id': task-1379518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164879} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.278616] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.278807] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1103.278991] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1103.279191] env[61974]: INFO nova.compute.manager [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1103.279434] env[61974]: DEBUG oslo.service.loopingcall [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1103.279630] env[61974]: DEBUG nova.compute.manager [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1103.279724] env[61974]: DEBUG nova.network.neutron [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1103.413406] env[61974]: DEBUG oslo_concurrency.lockutils [req-13a47f80-52cd-4057-bd9e-a8f2d7e3b0cb req-4cfe0865-6906-4d75-baea-f86cf04f3eb2 service nova] Releasing lock "refresh_cache-93409fd8-c9aa-427b-94b2-93f3db982786" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.636192] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563204} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.636192] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.636192] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.636633] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a44ac07f-c3c1-4b3c-aab1-28d2673ca4f0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.643407] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1103.643407] env[61974]: value = "task-1379519" [ 1103.643407] env[61974]: _type = "Task" [ 1103.643407] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.653017] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379519, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.750808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.784878] env[61974]: INFO nova.scheduler.client.report [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Deleted allocations for instance 18559ea6-0cc4-4201-bafa-e63868753a06 [ 1104.070337] env[61974]: DEBUG nova.compute.manager [req-4654237a-81e2-4af0-aaa0-398b2c78d623 req-1c6be68b-bdc5-45e2-a569-d80be02cc52f service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Received event network-vif-deleted-5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1104.070478] env[61974]: INFO nova.compute.manager [req-4654237a-81e2-4af0-aaa0-398b2c78d623 req-1c6be68b-bdc5-45e2-a569-d80be02cc52f service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Neutron deleted interface 5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee; detaching it from the instance and deleting it from the info cache [ 1104.070669] env[61974]: DEBUG nova.network.neutron [req-4654237a-81e2-4af0-aaa0-398b2c78d623 req-1c6be68b-bdc5-45e2-a569-d80be02cc52f service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.072226] env[61974]: DEBUG nova.network.neutron [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.157305] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129508} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.157305] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1104.157305] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cb33bb-f602-45be-9ddb-39c68b027f84 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.176718] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.177093] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44237da5-2105-424f-bcf9-9a990e2e7c88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.201237] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1104.201237] env[61974]: value = "task-1379520" [ 1104.201237] env[61974]: _type = "Task" [ 1104.201237] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.211067] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379520, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.298501] env[61974]: DEBUG oslo_concurrency.lockutils [None req-3bb2ab66-e9e9-414d-b7f4-399446a6228b tempest-ServersNegativeTestMultiTenantJSON-1507265719 tempest-ServersNegativeTestMultiTenantJSON-1507265719-project-member] Lock "18559ea6-0cc4-4201-bafa-e63868753a06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.060s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.575372] env[61974]: INFO nova.compute.manager [-] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Took 1.30 seconds to deallocate network for instance. 
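[editor's note] The entries above repeatedly show the oslo.vmware task pattern: a SOAP method such as CreateVM_Task, PowerOffVM_Task, or DeleteDatastoreFile_Task returns a Task moref, and the session polls it ("progress is N%") until it completes. A minimal sketch of that pattern, assuming placeholder credentials and a VM reference obtained elsewhere (none of these values come from this log, and this is not Nova's actual code path):

```python
# Sketch only: poll a vCenter task with oslo.vmware, producing the
# "Waiting for the task" / "progress is N%" DEBUG lines seen above.
# All connection parameters below are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',   # placeholder vCenter host
    'user',              # placeholder username
    'secret',            # placeholder password
    3,                   # api_retry_count
    0.5)                 # task_poll_interval (seconds)

vm_ref = ...  # managed object reference of the VM, obtained elsewhere

# invoke_api() issues the SOAP call and returns a task moref;
# wait_for_task() polls it and raises if the task ends in error.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```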
[ 1104.575699] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-716ac371-2dbe-475b-88ea-086167fc9666 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.590444] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf46d83-a724-4ff0-bd0e-ba69e29c5965 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.619350] env[61974]: DEBUG nova.compute.manager [req-4654237a-81e2-4af0-aaa0-398b2c78d623 req-1c6be68b-bdc5-45e2-a569-d80be02cc52f service nova] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Detach interface failed, port_id=5a34a1d1-d7f7-446b-ac5a-8c1da1db74ee, reason: Instance 14a74bf6-712b-4b82-a24f-6367d5180c6a could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1104.714592] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379520, 'name': ReconfigVM_Task, 'duration_secs': 0.323086} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.714592] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.715096] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71168e3d-5698-4e9c-8eec-ba1b7531e274 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.721878] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1104.721878] env[61974]: value = "task-1379521" [ 1104.721878] env[61974]: _type = "Task" [ 1104.721878] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.733723] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379521, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.084861] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.085226] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.085487] env[61974]: DEBUG nova.objects.instance [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lazy-loading 'resources' on Instance uuid 14a74bf6-712b-4b82-a24f-6367d5180c6a {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.235054] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379521, 'name': Rename_Task, 'duration_secs': 0.292877} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.235549] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.235908] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b1f7115-3b82-4e1b-98ee-7d8edf943b8a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.244511] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1105.244511] env[61974]: value = "task-1379522" [ 1105.244511] env[61974]: _type = "Task" [ 1105.244511] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.256849] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379522, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.443399] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "79be0650-faba-4516-b7cb-a25d45e941ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.443972] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.709291] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9caefac-1593-44cd-8315-a06e767b5023 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.718524] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740b5514-3bcc-478a-96e5-294e77c597fb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.755968] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ef3b17-d377-4c6e-9148-51c594cdb813 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.764583] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379522, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.772021] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b556f1-7e36-41a9-bb0f-297b7e118305 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.784900] env[61974]: DEBUG nova.compute.provider_tree [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.871551] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.874365] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.946936] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1106.261093] env[61974]: DEBUG oslo_vmware.api [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379522, 'name': PowerOnVM_Task, 'duration_secs': 0.666485} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.261445] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.261675] env[61974]: INFO nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Took 8.65 seconds to spawn the instance on the hypervisor. 
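[editor's note] The 'Acquiring lock "compute_resources"', 'acquired ... waited', and '"released" ... held' DEBUG lines throughout this section come from oslo.concurrency's lockutils wrapper around the resource tracker. A minimal, self-contained sketch (toy function name, not Nova's resource tracker) showing how that wrapper emits those messages:

```python
# Sketch only: oslo.concurrency's synchronized() decorator logs the
# "Acquiring lock", "acquired ... waited", and '"released" ... held'
# lines observed above. Lock name and function are illustrative.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Critical section: only one caller at a time holds "compute_resources".
    time.sleep(0.1)

update_usage_example()
```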
[ 1106.261862] env[61974]: DEBUG nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1106.262636] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2ad01e-5e72-42cd-bd16-084dd4ea4825 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.290024] env[61974]: DEBUG nova.scheduler.client.report [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1106.378098] env[61974]: DEBUG nova.compute.utils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1106.471873] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.780582] env[61974]: INFO nova.compute.manager [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Took 12.95 seconds to build instance. 
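[editor's note] The inventory dict reported for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a is what Placement uses for capacity checks: effective capacity per resource class is (total - reserved) * allocation_ratio. A small worked example over the same numbers logged above:

```python
# Worked example using the inventory values logged above. Effective
# schedulable capacity in Placement is (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```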
[ 1106.792836] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.795317] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.324s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.798043] env[61974]: INFO nova.compute.claims [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.824021] env[61974]: INFO nova.scheduler.client.report [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Deleted allocations for instance 14a74bf6-712b-4b82-a24f-6367d5180c6a [ 1106.880598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.282858] env[61974]: DEBUG oslo_concurrency.lockutils [None req-811277c8-69f0-4809-ade2-16ad397871e2 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.462s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.329371] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1912ae5e-8f4d-460c-b9e9-b3ad697bc62a tempest-ServersTestJSON-692110769 tempest-ServersTestJSON-692110769-project-member] Lock "14a74bf6-712b-4b82-a24f-6367d5180c6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.683s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.915859] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3ab5eb-1989-40ea-94fb-60c536022bf1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.925880] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cd4b19-bb82-4b9d-b7df-e748f46b6838 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.958848] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f8176b-4ae4-4079-ab60-5527df9b21e7 {{(pid=61974) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.966676] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1af994f-1ae0-41a1-8822-1169c0601549 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.971054] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.971054] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.971226] env[61974]: INFO nova.compute.manager [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Attaching volume b064d31a-890f-4a07-889e-b768586c162a to /dev/sdb [ 1107.984317] env[61974]: DEBUG nova.compute.provider_tree [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.018392] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f7266c-8092-449c-8dde-d94dc5a528f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.025608] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2683308d-ee2e-463d-b655-a90624ee5cbe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.039908] env[61974]: DEBUG nova.virt.block_device [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating existing volume attachment record: 63a99a85-8967-4d97-9985-b62580651429 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1108.490088] env[61974]: DEBUG nova.scheduler.client.report [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.557605] env[61974]: DEBUG nova.compute.manager [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Stashing vm_state: active {{(pid=61974) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1108.996069] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.996590] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1109.081292] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.081596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.501571] env[61974]: DEBUG nova.compute.utils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1109.503288] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1109.503288] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.570609] env[61974]: DEBUG nova.policy [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37793a04c1d64f7fa90100accf56dee2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dee552977af4446889e2eeaead47d74a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1109.587526] env[61974]: INFO nova.compute.claims [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.821781] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Successfully created port: 0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.006550] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1110.097116] env[61974]: INFO nova.compute.resource_tracker [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating resource usage from migration 0669cb53-3eae-44eb-9128-9f397ddeaeec [ 1110.200653] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441cccbb-e5f6-4bff-8ba8-082645e95c47 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.208087] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11debadb-e8eb-49ce-bf39-6e36122b1e5d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.237238] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a1d6c8-18d2-44b4-a526-fe05da99079d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.244600] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c2f237-a101-4805-b242-b8488e82a46b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.257444] env[61974]: DEBUG nova.compute.provider_tree [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.760406] env[61974]: DEBUG nova.scheduler.client.report [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1111.015610] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1111.040413] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.040693] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.040858] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.041060] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.041216] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.041399] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.041611] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.041774] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.042376] env[61974]: DEBUG 
nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.042376] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.042376] env[61974]: DEBUG nova.virt.hardware [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.043199] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd243fcb-d7f8-4ce8-8cea-9888b9ab0177 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.051170] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15891226-68e2-48d9-b360-24a70728b664 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.257637] env[61974]: DEBUG nova.compute.manager [req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Received event network-vif-plugged-0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1111.257876] env[61974]: DEBUG oslo_concurrency.lockutils [req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] Acquiring lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.258111] env[61974]: DEBUG oslo_concurrency.lockutils [req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] Lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.258294] env[61974]: DEBUG oslo_concurrency.lockutils [req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] Lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.258474] env[61974]: DEBUG nova.compute.manager [req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] No waiting events found dispatching network-vif-plugged-0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1111.258713] env[61974]: WARNING nova.compute.manager 
[req-d1efcb14-cea2-4005-aa7e-26ca68f5e9c9 req-bae6b45b-4dc2-44c6-bd64-ffd1b1202579 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Received unexpected event network-vif-plugged-0475f047-2c5e-454d-aad5-29379387f010 for instance with vm_state building and task_state spawning. [ 1111.265081] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.183s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.265270] env[61974]: INFO nova.compute.manager [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Migrating [ 1111.351105] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Successfully updated port: 0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.779013] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.779276] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.779487] env[61974]: DEBUG nova.network.neutron [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.853490] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.853490] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquired lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.853703] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Building network info cache for instance {{(pid=61974) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.406617] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.577574] env[61974]: DEBUG nova.network.neutron [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.590365] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Volume attach. 
Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1112.590890] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1112.591811] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0b2046-fa3b-4796-991c-5def24b83987 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.615716] env[61974]: DEBUG nova.network.neutron [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Updating instance_info_cache with network_info: [{"id": "0475f047-2c5e-454d-aad5-29379387f010", "address": "fa:16:3e:17:3e:88", "network": {"id": "a254b866-04be-476e-b94d-c867daf6b9a2", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1291467381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee552977af4446889e2eeaead47d74a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0475f047-2c", "ovs_interfaceid": "0475f047-2c5e-454d-aad5-29379387f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.617079] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e388cdd0-4694-4175-8aa6-28bf61a191d7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.645137] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk or device None with type thin {{(pid=61974) 
attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.645650] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c9d2e72-f1ea-4d7c-b8ac-21b25e77279a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.664034] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1112.664034] env[61974]: value = "task-1379527" [ 1112.664034] env[61974]: _type = "Task" [ 1112.664034] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.672925] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379527, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.083521] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.121326] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Releasing lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.121624] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Instance network_info: |[{"id": "0475f047-2c5e-454d-aad5-29379387f010", "address": "fa:16:3e:17:3e:88", "network": {"id": "a254b866-04be-476e-b94d-c867daf6b9a2", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1291467381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee552977af4446889e2eeaead47d74a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0475f047-2c", "ovs_interfaceid": "0475f047-2c5e-454d-aad5-29379387f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1113.122070] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:3e:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0475f047-2c5e-454d-aad5-29379387f010', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.129589] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Creating folder: Project (dee552977af4446889e2eeaead47d74a). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1113.129868] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f9dcc55-54d7-41e5-96c8-e40b2b57b0e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.140213] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Created folder: Project (dee552977af4446889e2eeaead47d74a) in parent group-v292912. [ 1113.140213] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Creating folder: Instances. Parent ref: group-v293040. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1113.140213] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6792337-0f88-4370-93cb-85731dfec123 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.149404] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Created folder: Instances in parent group-v293040. [ 1113.149630] env[61974]: DEBUG oslo.service.loopingcall [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.149822] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.150038] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b13df36-5c89-4664-9723-baba048f0c52 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.168736] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.168736] env[61974]: value = "task-1379530" [ 1113.168736] env[61974]: _type = "Task" [ 1113.168736] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.174935] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379527, 'name': ReconfigVM_Task, 'duration_secs': 0.367273} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.177700] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.182482] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379530, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.182699] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8596e50e-b85a-425b-931c-8d83f1530710 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.199399] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1113.199399] env[61974]: value = "task-1379531" [ 1113.199399] env[61974]: _type = "Task" [ 1113.199399] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.207302] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379531, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.285753] env[61974]: DEBUG nova.compute.manager [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Received event network-changed-0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1113.285992] env[61974]: DEBUG nova.compute.manager [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Refreshing instance network info cache due to event network-changed-0475f047-2c5e-454d-aad5-29379387f010. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1113.286202] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] Acquiring lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.286354] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] Acquired lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.286521] env[61974]: DEBUG nova.network.neutron [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Refreshing network info cache for port 0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.679497] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379530, 'name': CreateVM_Task, 'duration_secs': 0.317384} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.679821] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.680409] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.680574] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.680907] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1113.681172] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12ca5e53-1e42-4bac-af73-98c33e66e2d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.685678] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1113.685678] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5241e80f-3d47-2207-b382-61167cc20850" [ 1113.685678] env[61974]: _type = "Task" [ 1113.685678] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.693678] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5241e80f-3d47-2207-b382-61167cc20850, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.707812] env[61974]: DEBUG oslo_vmware.api [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379531, 'name': ReconfigVM_Task, 'duration_secs': 0.164005} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.707912] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1114.077179] env[61974]: DEBUG nova.network.neutron [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Updated VIF entry in instance network info cache for port 0475f047-2c5e-454d-aad5-29379387f010. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.077549] env[61974]: DEBUG nova.network.neutron [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Updating instance_info_cache with network_info: [{"id": "0475f047-2c5e-454d-aad5-29379387f010", "address": "fa:16:3e:17:3e:88", "network": {"id": "a254b866-04be-476e-b94d-c867daf6b9a2", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1291467381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dee552977af4446889e2eeaead47d74a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0475f047-2c", "ovs_interfaceid": "0475f047-2c5e-454d-aad5-29379387f010", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.196950] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5241e80f-3d47-2207-b382-61167cc20850, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.197570] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.197570] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.197786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.197934] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.198134] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.198399] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49b68bea-5e01-48be-979e-d7e551c5adea {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.206374] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.206557] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.207259] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7241353-52c5-4916-b9e8-1b2dfd5b97f2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.214194] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1114.214194] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52da92ed-2134-0b80-7639-45dc936b8c0e" [ 1114.214194] env[61974]: _type = "Task" [ 1114.214194] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.221666] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52da92ed-2134-0b80-7639-45dc936b8c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.580273] env[61974]: DEBUG oslo_concurrency.lockutils [req-dd7b50e1-fbc3-45af-ad11-f662753bb86c req-e976665f-116f-489e-a7d4-a2239613fe40 service nova] Releasing lock "refresh_cache-79be0650-faba-4516-b7cb-a25d45e941ee" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.598415] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f066e7a8-587f-461d-9ea7-ccf67b99423a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.616780] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 0 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1114.725054] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52da92ed-2134-0b80-7639-45dc936b8c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.008185} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.726201] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97335540-c471-44ae-a1c3-b91613a2a364 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.732018] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1114.732018] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52997b5a-626b-0b68-5673-47595bbc9d4b" [ 1114.732018] env[61974]: _type = "Task" [ 1114.732018] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.739514] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52997b5a-626b-0b68-5673-47595bbc9d4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.759593] env[61974]: DEBUG nova.objects.instance [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'flavor' on Instance uuid 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.123198] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.123689] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff8eabd2-6f6f-4533-846f-ef35ac9ceb10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.132275] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1115.132275] env[61974]: value = "task-1379532" [ 1115.132275] env[61974]: _type = "Task" [ 1115.132275] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.140406] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379532, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.242429] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52997b5a-626b-0b68-5673-47595bbc9d4b, 'name': SearchDatastore_Task, 'duration_secs': 0.009079} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.242731] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.243009] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 79be0650-faba-4516-b7cb-a25d45e941ee/79be0650-faba-4516-b7cb-a25d45e941ee.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1115.243286] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb5a2fcc-0239-4ff2-b415-3dda246cfb1e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.250137] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1115.250137] env[61974]: value = "task-1379533" [ 1115.250137] env[61974]: _type = "Task" [ 1115.250137] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.259579] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.264481] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bf076250-6ff1-40b5-8d31-e40cd2da0eb1 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.293s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.583062] env[61974]: INFO nova.compute.manager [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Rebuilding instance [ 1115.624787] env[61974]: DEBUG nova.compute.manager [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1115.625705] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c6a690-9ef2-4888-968c-5b4f71d34d4b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.642890] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379532, 'name': PowerOffVM_Task, 'duration_secs': 0.277479} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.642890] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.643178] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 17 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1115.760862] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379533, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.140696] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.141023] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-498ca8e9-8f1d-4f43-b484-04404c955840 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.150134] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.150431] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.150639] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.150846] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.151010] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.151179] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.151392] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.151560] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.151726] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.151891] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.152080] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.157553] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1116.157553] env[61974]: value = "task-1379534" [ 1116.157553] env[61974]: _type = "Task" [ 1116.157553] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.157775] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e15576c1-f367-4d0f-b18e-f62705537140 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.176648] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.177900] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1116.177900] env[61974]: value = "task-1379535" [ 1116.177900] env[61974]: _type = "Task" [ 1116.177900] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.185642] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379535, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.261801] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711894} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.263171] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 79be0650-faba-4516-b7cb-a25d45e941ee/79be0650-faba-4516-b7cb-a25d45e941ee.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1116.263171] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1116.263171] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf47fa8e-5182-4b60-ac77-ca27245cef81 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.270070] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1116.270070] env[61974]: value = "task-1379536" [ 1116.270070] env[61974]: _type = "Task" [ 1116.270070] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.279943] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.677487] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379534, 'name': PowerOffVM_Task, 'duration_secs': 0.198189} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.679058] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.687650] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379535, 'name': ReconfigVM_Task, 'duration_secs': 0.186543} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.687951] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 33 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1116.737511] env[61974]: INFO nova.compute.manager [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Detaching volume b064d31a-890f-4a07-889e-b768586c162a [ 1116.771994] env[61974]: INFO nova.virt.block_device [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Attempting to driver detach volume b064d31a-890f-4a07-889e-b768586c162a from mountpoint /dev/sdb [ 1116.772283] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1116.772472] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1116.777246] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ffcd6-cb62-4b12-91a7-196628001d89 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.786019] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079326} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.803300] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1116.804265] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf7c62-b0da-4c14-b6a5-ba644c8f4db2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.807450] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130944f2-a7c5-4f88-b126-2b3007b77a91 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.831072] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 79be0650-faba-4516-b7cb-a25d45e941ee/79be0650-faba-4516-b7cb-a25d45e941ee.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.833222] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-faa20685-5f7f-41e4-a0d4-81e5e5e66272 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.849951] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4af504b-772c-45be-9e0b-2e5b08c8c396 {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.872984] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68602a08-4f6a-48db-a4ae-bc28ab9ac285 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.875545] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1116.875545] env[61974]: value = "task-1379537" [ 1116.875545] env[61974]: _type = "Task" [ 1116.875545] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.889516] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] The volume has not been displaced from its original location: [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk. No consolidation needed. {{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1116.894664] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1116.895314] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-768028b6-9bc5-4f2e-9e5b-1952e4f68e20 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.910493] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.915086] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1116.915086] env[61974]: value = "task-1379538" [ 1116.915086] env[61974]: _type = "Task" [ 1116.915086] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.922618] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379538, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.195074] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.195074] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.195074] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.195074] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.195668] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.195668] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.195668] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.195782] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.195909] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Got 1 possible topologies 
{{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.196092] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.196331] env[61974]: DEBUG nova.virt.hardware [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.201744] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1117.202065] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e72fae5-6fc1-452c-b9d4-0cd4aabb6758 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.219947] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1117.219947] env[61974]: value = "task-1379539" [ 1117.219947] env[61974]: _type = "Task" [ 1117.219947] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.228052] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.385148] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.424337] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379538, 'name': ReconfigVM_Task, 'duration_secs': 0.185573} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.424645] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1117.429423] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f28f1bbf-59dd-462a-9d96-a853ff63c81b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.444030] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1117.444030] env[61974]: value = "task-1379540" [ 1117.444030] env[61974]: _type = "Task" [ 1117.444030] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.451791] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379540, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.732737] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379539, 'name': ReconfigVM_Task, 'duration_secs': 0.458604} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.733139] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1117.734318] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57e534a-2b80-4ca5-9f83-93c3805afff5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.771071] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.771419] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46bd5c36-a0d8-42dc-9f51-2be6be953e8c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.798386] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1117.798386] env[61974]: value = "task-1379541" [ 1117.798386] env[61974]: _type = "Task" [ 1117.798386] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.809041] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.886464] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379537, 'name': ReconfigVM_Task, 'duration_secs': 0.908164} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.887492] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 79be0650-faba-4516-b7cb-a25d45e941ee/79be0650-faba-4516-b7cb-a25d45e941ee.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.887632] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-282c802b-1b9b-4299-af82-8c065f529149 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.893655] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1117.893655] env[61974]: value = "task-1379542" [ 1117.893655] env[61974]: _type = "Task" [ 1117.893655] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.901068] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379542, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.953064] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379540, 'name': ReconfigVM_Task, 'duration_secs': 0.271936} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.953378] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1118.308459] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379541, 'name': ReconfigVM_Task, 'duration_secs': 0.276809} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.308459] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 3b0762f6-2419-491e-8929-835853a320af/3b0762f6-2419-491e-8929-835853a320af.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.308693] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 50 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.403679] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379542, 'name': Rename_Task, 'duration_secs': 0.130985} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.403950] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.404207] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-467e4b4e-8f67-4e07-aba8-b26a2cccbe2f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.409826] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1118.409826] env[61974]: value = "task-1379543" [ 1118.409826] env[61974]: _type = "Task" [ 1118.409826] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.416842] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379543, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.816560] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b25627-6ae6-4e15-aecc-5e1bb341cd02 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.836709] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb25254d-a93e-46f2-91a4-8006765fbdf8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.856362] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 67 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.918882] env[61974]: DEBUG oslo_vmware.api [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379543, 'name': PowerOnVM_Task, 'duration_secs': 0.43308} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.919384] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.919709] env[61974]: INFO nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Took 7.90 seconds to spawn the instance on the hypervisor. 
[ 1118.920049] env[61974]: DEBUG nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1118.920909] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccccd2a-096b-45ff-bf9e-0249465c4837 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.004460] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1119.005029] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0904f0da-b64e-44fe-abf2-be98ae92092c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.011319] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1119.011319] env[61974]: value = "task-1379544" [ 1119.011319] env[61974]: _type = "Task" [ 1119.011319] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.019169] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.396530] env[61974]: DEBUG nova.network.neutron [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Port b67804ff-e7c2-42a2-9b55-3965ed99c857 binding to destination host cpu-1 is already ACTIVE {{(pid=61974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1119.437756] env[61974]: INFO nova.compute.manager [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Took 12.99 seconds to build instance. [ 1119.523045] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] VM already powered off {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1119.523045] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1119.523045] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1119.523395] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d268fd4-ecaf-41c9-ad6c-6011fe592610 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.540540] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d5ea6b-1419-4ac2-9ab6-f66a03b009d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.546745] env[61974]: WARNING nova.virt.vmwareapi.driver [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1119.547042] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1119.547737] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8b8953-0443-44b8-9955-a2f4a2a9f5c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.553636] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1119.553852] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51e99485-2a36-4335-852e-b94efc04f3d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.617728] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1119.617953] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None 
req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1119.618156] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1119.618486] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f0b4200-a72d-4bf2-b652-1fddc6913a1c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.625595] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1119.625595] env[61974]: value = "task-1379546" [ 1119.625595] env[61974]: _type = "Task" [ 1119.625595] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.633231] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.939672] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d47b1675-5354-47bd-b512-c54afc675cc2 tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.496s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.954748] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "79be0650-faba-4516-b7cb-a25d45e941ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.954952] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.955179] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.955371] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.955543] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1119.957410] env[61974]: INFO nova.compute.manager [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Terminating instance [ 1119.959029] env[61974]: DEBUG nova.compute.manager [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1119.959234] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1119.960054] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dd6d81-bccf-4fdb-8bf6-960c1d4181b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.969396] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1119.969617] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5725449b-6231-4c7d-880a-276e48361f5b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.975343] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1119.975343] env[61974]: value = "task-1379547" [ 1119.975343] env[61974]: _type = "Task" [ 1119.975343] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.982833] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.135321] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12665} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.135589] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1120.135801] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1120.136070] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1120.417863] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.418079] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.418268] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.485692] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379547, 
'name': PowerOffVM_Task, 'duration_secs': 0.190935} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.485956] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1120.486153] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1120.486421] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f31b44be-8ace-4dcd-8565-581c3bcd2203 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.615146] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1120.615410] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1120.615600] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Deleting the datastore file [datastore2] 79be0650-faba-4516-b7cb-a25d45e941ee {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1120.615881] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02ef05f1-6552-4406-be48-7f651f84ca7c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.622192] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for the task: (returnval){ [ 1120.622192] env[61974]: value = "task-1379549" [ 1120.622192] env[61974]: _type = "Task" [ 1120.622192] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.629776] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379549, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.641346] env[61974]: INFO nova.virt.block_device [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Booting with volume b064d31a-890f-4a07-889e-b768586c162a at /dev/sdb [ 1120.675525] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16787509-1e7b-491e-8853-ad2926274842 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.684965] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf7acd2-4d74-4f14-aba3-3d7faa58d892 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.710283] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61da7542-9dc4-48fc-b1bb-fe438cd4067f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.717546] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1621a86c-f43e-49a7-abd4-3581e45ba211 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.743398] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6569ff-234c-4b78-a260-1b367966c402 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.749824] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88d0770-4a0f-49c8-8754-e4085378e703 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.763040] env[61974]: DEBUG nova.virt.block_device [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating existing volume attachment record: 7a637897-7224-4a50-8b4a-59e8a82a1084 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1121.133152] env[61974]: DEBUG oslo_vmware.api [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Task: {'id': task-1379549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137831} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.133504] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1121.133547] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1121.133722] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1121.133899] env[61974]: INFO nova.compute.manager [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1121.134200] env[61974]: DEBUG oslo.service.loopingcall [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1121.134421] env[61974]: DEBUG nova.compute.manager [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1121.134518] env[61974]: DEBUG nova.network.neutron [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1121.369309] env[61974]: DEBUG nova.compute.manager [req-edb088d0-43f6-497b-8b2c-d0257746374b req-e15080cc-648a-46c8-9a51-3ace12385af1 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Received event network-vif-deleted-0475f047-2c5e-454d-aad5-29379387f010 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1121.369309] env[61974]: INFO nova.compute.manager [req-edb088d0-43f6-497b-8b2c-d0257746374b req-e15080cc-648a-46c8-9a51-3ace12385af1 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Neutron deleted interface 0475f047-2c5e-454d-aad5-29379387f010; detaching it from the instance and deleting it from the info cache [ 1121.369309] env[61974]: DEBUG nova.network.neutron [req-edb088d0-43f6-497b-8b2c-d0257746374b req-e15080cc-648a-46c8-9a51-3ace12385af1 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.456724] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.456923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.457121] env[61974]: DEBUG nova.network.neutron [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.847654] env[61974]: DEBUG nova.network.neutron [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.872170] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c04b1441-94d8-4327-921f-050a877e0ef9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.882438] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58582fea-54b7-4032-9a3e-8e196eb8155a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.909203] env[61974]: DEBUG nova.compute.manager 
[req-edb088d0-43f6-497b-8b2c-d0257746374b req-e15080cc-648a-46c8-9a51-3ace12385af1 service nova] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Detach interface failed, port_id=0475f047-2c5e-454d-aad5-29379387f010, reason: Instance 79be0650-faba-4516-b7cb-a25d45e941ee could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1122.161760] env[61974]: DEBUG nova.network.neutron [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.349971] env[61974]: INFO nova.compute.manager [-] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Took 1.22 seconds to deallocate network for instance. 
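The records above and below repeat the same oslo.vmware sequence: "Invoking <Object>.<Something>_Task with opID=oslo.vmware-...", then "Waiting for the task ... to complete", a series of "_poll_task ... progress is N%" lines, and finally "completed successfully". As a reading aid only, the following is a minimal sketch of how that pattern is typically produced with the oslo.vmware session API; the host name, credentials, retry/poll values, and the `vm_ref` argument are illustrative assumptions and are not taken from this log, and the exact constructor keywords are as understood from oslo.vmware, not confirmed by these records.

```python
# Hedged sketch of the invoke -> wait_for_task polling pattern visible in
# this log (e.g. PowerOffVM_Task, CreateVM_Task, ReconfigVM_Task).
# All concrete values below are placeholders, not values from the log.
from oslo_vmware import api as vmware_api


def power_off(session, vm_ref):
    """Reproduce the Invoke/Waiting/progress/completed sequence for one task.

    `vm_ref` is assumed to be a VirtualMachine managed-object reference
    obtained elsewhere (Nova resolves it from the instance UUID).
    """
    # Emits "Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-..."
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Emits "Waiting for the task ... to complete", then periodic
    # "_poll_task ... progress is N%" lines, and returns once the task
    # reaches the success state ("completed successfully").
    return session.wait_for_task(task)


# Placeholder connection details; in Nova these come from the [vmware]
# section of nova.conf rather than being hard-coded.
session = vmware_api.VMwareAPISession(
    'vc.example.com', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)
```

The poll interval corresponds to the spacing of the "progress is N%" records; once the task result is returned, the driver logs the follow-up message seen here (for example "Powered off the VM" or "Created VM on the ESX host").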
[ 1122.664729] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.856421] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.857139] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.857416] env[61974]: DEBUG nova.objects.instance [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lazy-loading 'resources' on Instance uuid 79be0650-faba-4516-b7cb-a25d45e941ee {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.893105] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.893390] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.893557] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.893747] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.893901] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.894069] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.894465] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.894465] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.894606] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.894775] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.894954] env[61974]: DEBUG nova.virt.hardware [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.895817] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0141d42e-b722-474a-8223-0cf564fc5321 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.904232] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55282587-4582-4a1e-a1ff-83dc6e117dbf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.917343] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:ac:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'e43abc26-b25e-444f-9857-3967570e294a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1122.924699] env[61974]: DEBUG oslo.service.loopingcall [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.924931] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1122.925150] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6439015-a0af-43c3-b9c0-8e795bb632d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.944850] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1122.944850] env[61974]: value = "task-1379550" [ 1122.944850] env[61974]: _type = "Task" [ 1122.944850] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.952246] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379550, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.191161] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbeea3c-7ac7-4b7c-9c58-12d786e1d5eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.210940] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c20ea90-286e-4080-b7e0-20d345703b29 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.218216] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 83 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1123.455110] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379550, 'name': CreateVM_Task, 'duration_secs': 0.285434} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.455308] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.458077] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.458262] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.458591] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1123.459045] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91a28d15-a517-433a-b587-3de711428c88 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.463527] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1123.463527] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5243681f-43d1-4bda-b536-cf4344f7eea2" [ 1123.463527] env[61974]: _type = "Task" [ 1123.463527] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.473277] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5243681f-43d1-4bda-b536-cf4344f7eea2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.474740] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efe07f9-ab12-487d-9b4d-03e790e91dc2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.480830] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89804887-eca2-494d-b174-e7f6609b5dcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.510041] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c44349b-7553-4016-80ee-95c9539a7e29 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.517420] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbf1c0c-0b5b-49ff-8ebd-a8dc1a88b1b8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.531110] env[61974]: DEBUG nova.compute.provider_tree [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.724987] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.725273] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bb40edf-987f-4816-9b9c-3c72466055f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.732725] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1123.732725] env[61974]: value = "task-1379551" [ 1123.732725] env[61974]: _type = "Task" [ 1123.732725] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.739958] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.974956] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5243681f-43d1-4bda-b536-cf4344f7eea2, 'name': SearchDatastore_Task, 'duration_secs': 0.00912} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.975354] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.975588] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.975855] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.976023] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.976218] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.976501] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7f5de23-3b7d-4e52-a6ab-a1f7136109a1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.984396] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.984583] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.985301] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8094c87-6161-48a4-a6ac-c45e9869d903 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.990306] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1123.990306] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5283d4a4-fd15-8fe2-4b3d-eb2523206337" [ 1123.990306] env[61974]: _type = "Task" [ 1123.990306] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.998013] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5283d4a4-fd15-8fe2-4b3d-eb2523206337, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.035158] env[61974]: DEBUG nova.scheduler.client.report [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1124.242765] env[61974]: DEBUG oslo_vmware.api [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379551, 'name': PowerOnVM_Task, 'duration_secs': 0.385429} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.243093] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.243296] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-1d9ad123-9ddf-431f-96d9-01c338c14a5a tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance '3b0762f6-2419-491e-8929-835853a320af' progress to 100 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.501155] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5283d4a4-fd15-8fe2-4b3d-eb2523206337, 'name': SearchDatastore_Task, 'duration_secs': 0.01323} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.501965] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88526479-d5f6-461f-b023-f04f77c65e5c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.506766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.507014] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.509539] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1124.509539] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52642526-52c5-f22a-4369-ad0b3bba02e8" [ 1124.509539] env[61974]: _type = "Task" [ 1124.509539] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.518389] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52642526-52c5-f22a-4369-ad0b3bba02e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009019} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.519118] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.519380] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.519622] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72d4fe0d-9605-409e-9fca-47297a9fa930 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.526321] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1124.526321] env[61974]: value = "task-1379552" [ 1124.526321] env[61974]: _type = "Task" [ 1124.526321] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.533858] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.540948] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.559188] env[61974]: INFO nova.scheduler.client.report [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Deleted allocations for instance 79be0650-faba-4516-b7cb-a25d45e941ee [ 1125.011770] env[61974]: DEBUG nova.compute.utils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1125.039536] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379552, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455379} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.039863] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.040201] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.040551] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50e37eb8-c139-4551-a8d6-35c57c1683da {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.047373] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1125.047373] env[61974]: value = "task-1379553" [ 1125.047373] env[61974]: _type = "Task" [ 1125.047373] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.054542] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379553, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.065812] env[61974]: DEBUG oslo_concurrency.lockutils [None req-1c2f7d80-087f-4f79-8517-ca813dfd6f3f tempest-ServerAddressesTestJSON-1528825328 tempest-ServerAddressesTestJSON-1528825328-project-member] Lock "79be0650-faba-4516-b7cb-a25d45e941ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.111s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.514390] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.558091] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061852} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.558376] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.559187] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28583dc3-0caf-4c58-a4ea-bb9f7704d1a7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.582178] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.582447] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e12db3e-0261-4ea6-af7b-c837f5eb954a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.601658] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1125.601658] env[61974]: value = "task-1379554" [ 1125.601658] env[61974]: _type = "Task" [ 1125.601658] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.609112] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.113591] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379554, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.591135] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.591480] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.591630] env[61974]: INFO nova.compute.manager [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Attaching volume 453401d3-6611-44e6-8f20-5e07f43fd6dd to /dev/sdb [ 1126.611633] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379554, 'name': ReconfigVM_Task, 'duration_secs': 0.728923} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.611889] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8/8d71aaf0-e35c-4e6e-9094-d55b1544c3c8.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.613280] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'device_name': '/dev/sda', 'encryption_options': None, 'size': 0, 'encryption_format': None, 'encrypted': False, 'guest_format': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'disk_bus': None, 'image_id': '2c021a64-f3a3-4b0a-8c90-b07440a3f3d8'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '7a637897-7224-4a50-8b4a-59e8a82a1084', 'delete_on_termination': False, 'device_type': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 
'b064d31a-890f-4a07-889e-b768586c162a'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1126.613465] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Volume attach. Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1126.613666] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1126.614449] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b456fc-e6b1-4201-8d09-d5dbb2daab8e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.630800] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d281a4c-952b-4d9f-8369-c92f7ce0d495 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.633609] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449938d3-4f10-4bfc-a881-468f9a8c305c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.656739] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.658319] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-effdc46c-fdf9-4c56-870a-3ddbba2b7c23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.672977] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb63c3f9-74ff-4c90-9d6f-0ffdb3726bbe {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.679681] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1126.679681] 
env[61974]: value = "task-1379555" [ 1126.679681] env[61974]: _type = "Task" [ 1126.679681] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.686734] env[61974]: DEBUG nova.virt.block_device [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updating existing volume attachment record: ee789627-14cd-4151-9197-3e6ecc57bce6 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1126.691552] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379555, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.903690] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.904025] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.904260] env[61974]: DEBUG nova.compute.manager [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Going to confirm migration 3 {{(pid=61974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1127.188920] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379555, 'name': ReconfigVM_Task, 'duration_secs': 0.451394} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.189238] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.194332] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7393f6a3-e482-4a02-9e16-f6eebef2d7af {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.208461] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1127.208461] env[61974]: value = "task-1379557" [ 1127.208461] env[61974]: _type = "Task" [ 1127.208461] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.215938] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.443776] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.444030] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquired lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.444225] env[61974]: DEBUG nova.network.neutron [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.444419] env[61974]: DEBUG nova.objects.instance [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'info_cache' on Instance uuid 3b0762f6-2419-491e-8929-835853a320af {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.718346] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379557, 'name': ReconfigVM_Task, 'duration_secs': 0.169535} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.718678] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1127.719271] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-559abe09-e77a-40b5-a13c-9be1f84a1813 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.724962] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1127.724962] env[61974]: value = "task-1379558" [ 1127.724962] env[61974]: _type = "Task" [ 1127.724962] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.732365] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379558, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.877764] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.878034] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.878197] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1128.234906] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379558, 'name': Rename_Task} progress is 14%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.639602] env[61974]: DEBUG nova.network.neutron [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [{"id": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "address": "fa:16:3e:65:6e:7a", "network": {"id": "615a7a34-a392-45bd-ba4d-7b39605e520b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1432153827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d104a741ebad47748ae5646356589fce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67804ff-e7", "ovs_interfaceid": "b67804ff-e7c2-42a2-9b55-3965ed99c857", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.735317] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379558, 'name': Rename_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.142559] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Releasing lock "refresh_cache-3b0762f6-2419-491e-8929-835853a320af" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.142859] env[61974]: DEBUG nova.objects.instance [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'migration_context' on Instance uuid 3b0762f6-2419-491e-8929-835853a320af {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.237082] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379558, 'name': Rename_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.413948] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.414125] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.414280] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Forcefully refreshing network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1129.645796] env[61974]: DEBUG nova.objects.base [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Object Instance<3b0762f6-2419-491e-8929-835853a320af> lazy-loaded attributes: info_cache,migration_context {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1129.646838] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c7d320-11f6-45bd-af61-4ec1e0e774c0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.665367] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfee9d37-7c7d-49f2-950c-4861b4359c24 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.670159] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1129.670159] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d17579-c222-4ca7-bc38-ec391d063733" [ 1129.670159] env[61974]: _type = "Task" [ 1129.670159] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.677213] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d17579-c222-4ca7-bc38-ec391d063733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.734591] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379558, 'name': Rename_Task, 'duration_secs': 1.621367} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.734931] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.735188] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d30dfc1-b9f9-46b0-9f8e-c3dc774d2f30 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.740965] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1129.740965] env[61974]: value = "task-1379560" [ 1129.740965] env[61974]: _type = "Task" [ 1129.740965] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.747855] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.181205] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d17579-c222-4ca7-bc38-ec391d063733, 'name': SearchDatastore_Task, 'duration_secs': 0.006852} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.181497] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1130.181744] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1130.251025] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379560, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.632422] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.752774] env[61974]: DEBUG oslo_vmware.api [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379560, 'name': PowerOnVM_Task, 'duration_secs': 0.703289} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.755033] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.755283] env[61974]: DEBUG nova.compute.manager [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1130.756218] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bdb41a-bf93-4178-bb68-3510faecad2b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.766161] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af57d1b-17f9-480f-84b2-0b15cb39f0eb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.773097] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5fdb4f-72d3-4367-b076-f8d2580a7555 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.803342] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b138aa-8b7b-49ea-8fe3-8577a8496eb9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.810591] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c9ee8a-5fad-48de-b9b1-d0730f8eb837 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.823880] env[61974]: DEBUG nova.compute.provider_tree [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.135720] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.136047] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updated the network info_cache for instance {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1131.136373] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.136526] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] 
Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.136731] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.136941] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.137203] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.137412] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.137588] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1131.137791] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.233621] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Volume attach. 
Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1131.233927] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293044', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'name': 'volume-453401d3-6611-44e6-8f20-5e07f43fd6dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d4f5746-5873-4933-8741-c07ca43c13cb', 'attached_at': '', 'detached_at': '', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'serial': '453401d3-6611-44e6-8f20-5e07f43fd6dd'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1131.234886] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885610d3-b7c8-483f-b604-3c38da067f80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.251751] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23221ca5-53f4-48db-b899-232858b3ad06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.283190] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] volume-453401d3-6611-44e6-8f20-5e07f43fd6dd/volume-453401d3-6611-44e6-8f20-5e07f43fd6dd.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.285544] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37e82448-d216-4c7d-8328-abeecacdef95 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.299552] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.307499] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1131.307499] env[61974]: value = "task-1379561" [ 1131.307499] env[61974]: _type = "Task" [ 1131.307499] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.317513] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379561, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.326554] env[61974]: DEBUG nova.scheduler.client.report [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1131.641705] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.818093] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379561, 'name': ReconfigVM_Task, 'duration_secs': 0.394015} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.818093] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfigured VM instance instance-00000064 to attach disk [datastore1] volume-453401d3-6611-44e6-8f20-5e07f43fd6dd/volume-453401d3-6611-44e6-8f20-5e07f43fd6dd.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.822803] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a578553-5448-4428-8284-d55f067c4bec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.841634] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1131.841634] env[61974]: value = "task-1379562" [ 1131.841634] env[61974]: _type = "Task" [ 1131.841634] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.850261] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.076808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.077069] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.338489] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.157s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.341258] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.042s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.341452] env[61974]: DEBUG nova.objects.instance [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61974) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1132.352845] env[61974]: DEBUG oslo_vmware.api [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379562, 'name': ReconfigVM_Task, 'duration_secs': 0.158617} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.353125] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293044', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'name': 'volume-453401d3-6611-44e6-8f20-5e07f43fd6dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d4f5746-5873-4933-8741-c07ca43c13cb', 'attached_at': '', 'detached_at': '', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'serial': '453401d3-6611-44e6-8f20-5e07f43fd6dd'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1132.580651] env[61974]: INFO nova.compute.manager [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Detaching volume b064d31a-890f-4a07-889e-b768586c162a [ 1132.612296] env[61974]: INFO nova.virt.block_device [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Attempting to driver detach volume b064d31a-890f-4a07-889e-b768586c162a from mountpoint /dev/sdb [ 1132.612537] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1132.612797] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1132.613870] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db79a91-958b-4bf3-af18-d6b0f5eb155b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.635203] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5de443-ea33-4aa2-b3f0-13feafc7fbd8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.641907] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37f5e03-3b22-49aa-a81d-bc2a1b6fdd55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.662741] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f517161-907e-4074-8547-d6662d81b17d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.676793] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] The volume has not been displaced from its original location: [datastore2] volume-b064d31a-890f-4a07-889e-b768586c162a/volume-b064d31a-890f-4a07-889e-b768586c162a.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1132.681889] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1132.682137] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab97d692-2e79-468c-9b94-aa6d3ae0c67f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.700249] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1132.700249] env[61974]: value = "task-1379563" [ 1132.700249] env[61974]: _type = "Task" [ 1132.700249] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.710153] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379563, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.899125] env[61974]: INFO nova.scheduler.client.report [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocation for migration 0669cb53-3eae-44eb-9128-9f397ddeaeec [ 1133.210899] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379563, 'name': ReconfigVM_Task, 'duration_secs': 0.396887} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.211239] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1133.215897] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4486145c-95a6-4b53-b7df-3a1ce05ff552 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.230019] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1133.230019] env[61974]: value = "task-1379564" [ 1133.230019] env[61974]: _type = "Task" [ 1133.230019] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.237945] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.354581] env[61974]: DEBUG oslo_concurrency.lockutils [None req-bb83374d-7749-4524-a182-283c2f5c4e89 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.355625] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.714s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.356584] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.356584] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1133.357051] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd8888c-b077-4dff-b286-08631c089ac7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.365418] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251f34b1-49a1-4593-a58a-bf1e03e9da25 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.381350] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4725dae-fe2b-4d58-b63f-0622dc84f215 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.386049] env[61974]: DEBUG nova.objects.instance [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid 4d4f5746-5873-4933-8741-c07ca43c13cb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.390318] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4033bd-bb92-4173-83a6-277ce8b4c87c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.420846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 
tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.517s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.422680] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180712MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1133.422793] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.423190] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.739761] env[61974]: DEBUG oslo_vmware.api [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379564, 'name': ReconfigVM_Task, 'duration_secs': 0.169694} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.740022] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293039', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'name': 'volume-b064d31a-890f-4a07-889e-b768586c162a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8d71aaf0-e35c-4e6e-9094-d55b1544c3c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'b064d31a-890f-4a07-889e-b768586c162a', 'serial': 'b064d31a-890f-4a07-889e-b768586c162a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1133.893193] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2ccd3cdc-8618-4c6e-9fe1-c2f6c1f861f6 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.302s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.036924] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.037281] env[61974]: DEBUG 
oslo_concurrency.lockutils [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.278594] env[61974]: DEBUG nova.objects.instance [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'flavor' on Instance uuid 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.451259] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.451439] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.451568] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4d4f5746-5873-4933-8741-c07ca43c13cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.451691] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 93409fd8-c9aa-427b-94b2-93f3db982786 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.451813] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 3b0762f6-2419-491e-8929-835853a320af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1134.451992] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1134.452148] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1134.516289] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab9c7c1-9146-46c5-be4e-1e42fbdf54c6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.523885] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7fee30-2db5-41d4-99ba-bedfc31c4b59 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.554109] env[61974]: INFO nova.compute.manager [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Detaching volume 453401d3-6611-44e6-8f20-5e07f43fd6dd [ 1134.557123] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3db7e3-b9ca-4856-b644-fabfee9077f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.565887] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c67bc3d-9782-4dac-a715-16107d716145 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.578929] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.583232] env[61974]: INFO nova.virt.block_device [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Attempting to driver detach volume 453401d3-6611-44e6-8f20-5e07f43fd6dd from mountpoint /dev/sdb [ 1134.583466] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1134.583659] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293044', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'name': 'volume-453401d3-6611-44e6-8f20-5e07f43fd6dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d4f5746-5873-4933-8741-c07ca43c13cb', 'attached_at': '', 'detached_at': '', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'serial': '453401d3-6611-44e6-8f20-5e07f43fd6dd'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1134.584444] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ffbe42-9e8f-4cc1-952d-feaced2af66e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.604324] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519664a6-61c8-467d-8b49-e5aff6efe8cd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.610702] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a067e98-6ae6-423a-a8ad-c2068a580d10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.629952] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d0e542-bbed-443e-a65a-d21651c1f41e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.643773] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] The volume has not been displaced from its original location: [datastore1] volume-453401d3-6611-44e6-8f20-5e07f43fd6dd/volume-453401d3-6611-44e6-8f20-5e07f43fd6dd.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1134.648952] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1134.649352] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-296bbeea-8820-4b4c-8c26-dfe3824d5c97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.666787] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1134.666787] env[61974]: value = "task-1379565" [ 1134.666787] env[61974]: _type = "Task" [ 1134.666787] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.675411] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379565, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.923857] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.924157] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.924387] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "3b0762f6-2419-491e-8929-835853a320af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.924584] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.924767] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 
tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.927017] env[61974]: INFO nova.compute.manager [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Terminating instance [ 1134.929039] env[61974]: DEBUG nova.compute.manager [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1134.929219] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1134.930059] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed63520-7832-4cee-a032-8bf8289efdf4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.937854] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.938117] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7aa31582-3fb6-46b9-a353-db09e1d2dec8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.944234] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1134.944234] env[61974]: value = "task-1379566" [ 1134.944234] env[61974]: _type = "Task" [ 1134.944234] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.951828] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379566, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.081807] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1135.177247] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379565, 'name': ReconfigVM_Task, 'duration_secs': 0.201912} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.177547] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1135.182160] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0be1dc5a-1b5a-4b4b-b78f-88c60324569a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.196417] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1135.196417] env[61974]: value = "task-1379567" [ 1135.196417] env[61974]: _type = "Task" [ 1135.196417] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.203947] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379567, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.286255] env[61974]: DEBUG oslo_concurrency.lockutils [None req-f5968cd4-d88a-46a2-b743-e430671a4c10 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.453866] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379566, 'name': PowerOffVM_Task, 'duration_secs': 0.228872} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.454082] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.454265] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1135.454789] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee93b6ce-ad71-485b-8318-4692511e8f18 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.586821] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1135.587055] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.164s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.706266] env[61974]: DEBUG oslo_vmware.api [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379567, 'name': ReconfigVM_Task, 'duration_secs': 0.130103} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.706555] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293044', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'name': 'volume-453401d3-6611-44e6-8f20-5e07f43fd6dd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4d4f5746-5873-4933-8741-c07ca43c13cb', 'attached_at': '', 'detached_at': '', 'volume_id': '453401d3-6611-44e6-8f20-5e07f43fd6dd', 'serial': '453401d3-6611-44e6-8f20-5e07f43fd6dd'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1136.249421] env[61974]: DEBUG nova.objects.instance [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid 4d4f5746-5873-4933-8741-c07ca43c13cb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.322476] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.322749] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.322989] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.323228] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.323412] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.325466] env[61974]: INFO nova.compute.manager [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Terminating instance [ 1136.327322] env[61974]: DEBUG nova.compute.manager [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1136.327520] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.328354] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d48195-d60a-489c-84d4-a9b3fbb98fff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.335957] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.336189] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e9fef3f-c66d-464e-b8f3-38b58f876301 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.342463] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1136.342463] env[61974]: value = "task-1379569" [ 1136.342463] env[61974]: _type = "Task" [ 1136.342463] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.349605] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379569, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.786356] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.786419] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.787026] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleting the datastore file [datastore1] 3b0762f6-2419-491e-8929-835853a320af {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.787026] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f76e9fe-f216-4a9c-8646-2f29a63a0377 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.793092] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for the task: (returnval){ [ 1136.793092] env[61974]: value = "task-1379570" [ 1136.793092] env[61974]: _type = "Task" [ 1136.793092] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.803063] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.852248] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379569, 'name': PowerOffVM_Task, 'duration_secs': 0.202463} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.852554] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.852741] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.853026] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-946b9dbb-d364-4bcd-9b96-e9da6fb7fea8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.915550] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.915731] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.915937] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore2] 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.916337] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c52661c-ed67-467a-af5d-79cbd08f2c4c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.922390] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1136.922390] env[61974]: value = "task-1379572" [ 1136.922390] env[61974]: _type = "Task" [ 1136.922390] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.931829] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379572, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.257056] env[61974]: DEBUG oslo_concurrency.lockutils [None req-09f87149-73f3-4c2d-a0ef-31b16a3a5c57 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.219s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.302973] env[61974]: DEBUG oslo_vmware.api [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Task: {'id': task-1379570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150272} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.303498] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.303711] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.303899] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.304090] env[61974]: INFO nova.compute.manager [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] [instance: 3b0762f6-2419-491e-8929-835853a320af] Took 2.37 seconds to destroy the instance on the hypervisor. [ 1137.304335] env[61974]: DEBUG oslo.service.loopingcall [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.304525] env[61974]: DEBUG nova.compute.manager [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1137.304617] env[61974]: DEBUG nova.network.neutron [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1137.432216] env[61974]: DEBUG oslo_vmware.api [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136797} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.432420] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.432636] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.432823] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.433018] env[61974]: INFO nova.compute.manager [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1137.433282] env[61974]: DEBUG oslo.service.loopingcall [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.433520] env[61974]: DEBUG nova.compute.manager [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1137.433636] env[61974]: DEBUG nova.network.neutron [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1137.570826] env[61974]: DEBUG nova.compute.manager [req-26047639-23d2-4aaf-8975-1c0050959966 req-c7ca7a98-f1d4-429b-bd2d-f4908a667f25 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Received event network-vif-deleted-b67804ff-e7c2-42a2-9b55-3965ed99c857 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1137.571064] env[61974]: INFO nova.compute.manager [req-26047639-23d2-4aaf-8975-1c0050959966 req-c7ca7a98-f1d4-429b-bd2d-f4908a667f25 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Neutron deleted interface b67804ff-e7c2-42a2-9b55-3965ed99c857; detaching it from the instance and deleting it from the info cache [ 1137.571293] env[61974]: DEBUG nova.network.neutron [req-26047639-23d2-4aaf-8975-1c0050959966 req-c7ca7a98-f1d4-429b-bd2d-f4908a667f25 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.932307] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.932551] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.047200] env[61974]: DEBUG nova.network.neutron [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.074008] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e11a81a-2f87-409e-97e4-06e2d31a314a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.083894] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d610a5bf-c465-42df-a412-baf544aa707a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.110449] env[61974]: DEBUG nova.compute.manager [req-26047639-23d2-4aaf-8975-1c0050959966 req-c7ca7a98-f1d4-429b-bd2d-f4908a667f25 service nova] [instance: 3b0762f6-2419-491e-8929-835853a320af] Detach interface failed, port_id=b67804ff-e7c2-42a2-9b55-3965ed99c857, reason: Instance 
3b0762f6-2419-491e-8929-835853a320af could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1138.282694] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.283062] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.283211] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.283418] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.283593] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.285738] env[61974]: INFO nova.compute.manager [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Terminating instance [ 1138.287420] env[61974]: DEBUG nova.compute.manager [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1138.287616] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1138.288432] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09eb900d-2f58-4491-8bf4-865328ff5f60 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.296015] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1138.296277] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fc59a3a-6988-44bd-beb3-86af21e61756 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.302706] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1138.302706] env[61974]: value = "task-1379573" [ 1138.302706] env[61974]: _type = "Task" [ 1138.302706] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.309866] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.410022] env[61974]: DEBUG nova.network.neutron [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.436641] env[61974]: DEBUG nova.compute.utils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1138.550620] env[61974]: INFO nova.compute.manager [-] [instance: 3b0762f6-2419-491e-8929-835853a320af] Took 1.25 seconds to deallocate network for instance. [ 1138.812741] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379573, 'name': PowerOffVM_Task, 'duration_secs': 0.162076} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.813070] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.813220] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.813476] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc716742-33f4-4f83-b2cf-9c83ab7837e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.872829] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.873089] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.873279] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleting the datastore file [datastore2] 4d4f5746-5873-4933-8741-c07ca43c13cb {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.873546] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-520f0588-18e5-4a53-b9b9-278a38d4dba6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.879040] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1138.879040] env[61974]: value = "task-1379575" [ 1138.879040] env[61974]: _type = "Task" [ 1138.879040] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.887478] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.913141] env[61974]: INFO nova.compute.manager [-] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Took 1.48 seconds to deallocate network for instance. 
[ 1138.939864] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.057306] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.057605] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.057892] env[61974]: DEBUG nova.objects.instance [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lazy-loading 'resources' on Instance uuid 3b0762f6-2419-491e-8929-835853a320af {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.389073] env[61974]: DEBUG oslo_vmware.api [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134393} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.389073] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1139.389073] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1139.389461] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1139.389461] env[61974]: INFO nova.compute.manager [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Took 1.10 seconds to destroy the instance on the hypervisor. 
[ 1139.389655] env[61974]: DEBUG oslo.service.loopingcall [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1139.389853] env[61974]: DEBUG nova.compute.manager [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1139.389950] env[61974]: DEBUG nova.network.neutron [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1139.420278] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.603935] env[61974]: DEBUG nova.compute.manager [req-92a1f07e-0a43-40bc-a4cd-c5ef40732078 req-9f716778-0b84-48de-9805-547922ea4cef service nova] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Received event network-vif-deleted-e43abc26-b25e-444f-9857-3967570e294a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1139.632859] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857d4c51-3c1a-4037-8b32-d390955e1f37 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.642213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63433ab4-5758-497b-b3ba-5c126fe1bc87 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.671440] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e39e84-6296-48de-9450-ea2b5c8b2048 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.677810] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5aa1c9-884e-4a42-a54d-1c72662514ad {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.690725] env[61974]: DEBUG nova.compute.provider_tree [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.000895] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
[ 1140.001600] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.001849] env[61974]: INFO nova.compute.manager [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attaching volume 18a01aaf-194f-482b-8bbf-d6f159cf5f96 to /dev/sdb [ 1140.038084] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2891f330-ae89-4a4f-9033-14a1d5f71c9e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.045277] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2bac36-3d2e-40ab-9236-eb4fccd10962 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.057869] env[61974]: DEBUG nova.virt.block_device [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating existing volume attachment record: 8f058206-95fe-4f6e-b815-77cbfa9858ec {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1140.194989] env[61974]: DEBUG nova.scheduler.client.report [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1140.524665] env[61974]: DEBUG nova.network.neutron [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.698512] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.700766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.281s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.701018] env[61974]: DEBUG nova.objects.instance [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'resources' on Instance uuid 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.720532] env[61974]: INFO nova.scheduler.client.report [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Deleted allocations for instance 3b0762f6-2419-491e-8929-835853a320af [ 1141.026996] env[61974]: INFO nova.compute.manager [-] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Took 1.64 seconds to deallocate network for instance. [ 1141.229112] env[61974]: DEBUG oslo_concurrency.lockutils [None req-4d69fe7d-1fe4-4e96-99e1-e9d1b4398241 tempest-DeleteServersTestJSON-292112520 tempest-DeleteServersTestJSON-292112520-project-member] Lock "3b0762f6-2419-491e-8929-835853a320af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.305s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.267883] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fceb6fa-5093-4455-a548-1302f0a5b008 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.275404] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642238d6-a46d-41f1-adaa-fef5755b5b4a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.307303] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0fccbf-4c40-4a96-88ac-7ffde41f4b23 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.315321] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f1a3c7-6a5f-4254-9ed7-cf300711baac {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.329297] env[61974]: DEBUG nova.compute.provider_tree [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.533784] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.630250] env[61974]: DEBUG nova.compute.manager [req-7c1cb966-2cf5-4a47-bb0c-38117242d15f req-d6dcff78-c34b-4bef-83ff-5933302a9996 service nova] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Received event network-vif-deleted-55afa9de-d15b-470c-a494-746a6ad74042 {{(pid=61974) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 1141.833119] env[61974]: DEBUG nova.scheduler.client.report [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1142.339082] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.341634] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.808s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.341898] env[61974]: DEBUG nova.objects.instance [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'resources' on Instance uuid 4d4f5746-5873-4933-8741-c07ca43c13cb {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.356599] env[61974]: INFO nova.scheduler.client.report [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocations for instance 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8 [ 1142.580234] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.580605] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.864891] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d35da1c2-99d9-4673-9a88-e183c8d8997e tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "8d71aaf0-e35c-4e6e-9094-d55b1544c3c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.542s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.894952] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcb5148-bd02-4d18-8f91-ee96c39be68e 
{{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.902571] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eede79f3-b290-421c-bfe7-fda3d5939b7f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.931740] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0d2aa7-e31e-4754-8c86-06ac921c95f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.938670] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ec5bf6-f164-46d0-a642-cd140c67dbc4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.951283] env[61974]: DEBUG nova.compute.provider_tree [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.455224] env[61974]: DEBUG nova.scheduler.client.report [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1143.960283] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.983838] env[61974]: INFO nova.scheduler.client.report [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted allocations for instance 4d4f5746-5873-4933-8741-c07ca43c13cb [ 1144.491685] env[61974]: DEBUG oslo_concurrency.lockutils [None req-52387e29-0ef8-4967-99b9-ab48e210bce1 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "4d4f5746-5873-4933-8741-c07ca43c13cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.208s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.602305] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Volume attach. 
Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1144.602556] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293045', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'name': 'volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'serial': '18a01aaf-194f-482b-8bbf-d6f159cf5f96'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1144.603459] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa41486-f1dc-4394-b8c2-042a4f284d38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.619820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790b31ad-74e2-4aa6-a33f-b8d3ee12695e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.644822] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96/volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.645195] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69af411c-7830-4603-8c1a-561919840239 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.663188] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1144.663188] env[61974]: value = "task-1379579" [ 1144.663188] env[61974]: _type = "Task" [ 1144.663188] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.673473] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379579, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.022202] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.022480] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.078350] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.078577] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.172450] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379579, 'name': ReconfigVM_Task, 'duration_secs': 0.342549} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.172731] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96/volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1145.177319] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-641ffc94-9cb8-468b-b0f2-74ac46b80e8f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.191742] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1145.191742] env[61974]: value = "task-1379580" [ 1145.191742] env[61974]: _type = "Task" [ 1145.191742] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.199336] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379580, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.525175] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1145.575580] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.704809] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379580, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.046074] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.046350] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.047776] env[61974]: INFO nova.compute.claims [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.082918] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.082918] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1146.082918] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.082918] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Cleaning up deleted instances with incomplete migration {{(pid=61974) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1146.202317] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379580, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.460466] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.460707] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.702838] env[61974]: DEBUG oslo_vmware.api [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379580, 'name': ReconfigVM_Task, 'duration_secs': 1.141375} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.703188] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293045', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'name': 'volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'serial': '18a01aaf-194f-482b-8bbf-d6f159cf5f96'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1146.962887] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1147.081854] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.082266] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1147.124744] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfc73a4-a44d-4c45-bca6-8cd905b8558e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.132839] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89569a89-b724-45bd-843c-befb4a4227f0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.163783] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ca119e-a80f-43d9-b713-1f32c8286e80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.171133] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dea8aad-d147-452f-ab3e-75facb531978 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.184166] env[61974]: DEBUG nova.compute.provider_tree [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1147.482521] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.716301] env[61974]: ERROR nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [req-62603616-0cb8-4196-bc0a-9ddf2eace25a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
81f72dd1-35ef-4b87-b120-a6ea5ab8608a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-62603616-0cb8-4196-bc0a-9ddf2eace25a"}]} [ 1147.735582] env[61974]: DEBUG nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Refreshing inventories for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1147.741354] env[61974]: DEBUG nova.objects.instance [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.750783] env[61974]: DEBUG nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating ProviderTree inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1147.751066] env[61974]: DEBUG nova.compute.provider_tree [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1147.762505] env[61974]: DEBUG nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Refreshing aggregate associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, aggregates: None {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1147.782507] env[61974]: DEBUG nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Refreshing trait associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1147.854178] env[61974]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cc1a45-bb31-47d4-961f-3cd7fcc96f9a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.865606] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6fa561-1a7d-4aeb-8e79-110fdd91f5cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.919980] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9477d8a7-7ae1-4e22-bfac-73c9ee058d10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.929020] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fafa7c4-e9bb-4802-a873-874c475a5382 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.945210] env[61974]: DEBUG nova.compute.provider_tree [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1148.246185] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b161c3de-40c0-41fb-b824-f1c543abaac6 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.244s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.529841] env[61974]: DEBUG nova.scheduler.client.report [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1148.530396] env[61974]: DEBUG nova.compute.provider_tree [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 130 to 131 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1148.530396] env[61974]: DEBUG nova.compute.provider_tree [None 
req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1149.035432] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.989s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.035960] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1149.041431] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.557s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.042958] env[61974]: INFO nova.compute.claims [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1149.302641] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.302641] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.547888] env[61974]: DEBUG nova.compute.utils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1149.549821] env[61974]: DEBUG nova.compute.manager [None 
req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1149.549821] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.595471] env[61974]: DEBUG nova.policy [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4896588cebd84071a573046de7006429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db6af28263c40708c2466226ce03009', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1149.597087] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Didn't find any instances for network info cache update. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1149.597264] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.597758] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.597935] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.598123] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.598302] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Cleaning up deleted instances {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1149.806775] env[61974]: DEBUG nova.compute.utils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1149.891099] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Successfully created port: 919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.058449] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Start building block device mappings for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1150.111288] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] There are 41 instances to clean {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1150.111582] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 79be0650-faba-4516-b7cb-a25d45e941ee] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1150.144550] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165dda7d-0c38-4c60-887f-0b5cba1dbe40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.153996] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97efcea6-7204-47b0-99a4-db0a07431841 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.187295] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e9e4d7-8191-4e2b-a164-150ba63f2931 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.195249] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dddee7-c910-4a53-9de3-eb53ff033f7c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.210615] env[61974]: DEBUG nova.compute.provider_tree [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.311849] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.619959] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 3b0762f6-2419-491e-8929-835853a320af] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1150.715166] env[61974]: DEBUG nova.scheduler.client.report [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 
tempest-AttachVolumeNegativeTest-900798343-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1151.067382] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1151.091518] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.091768] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.092025] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.092242] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.092393] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.092543] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.092755] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.092917] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.093098] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.093267] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.093438] env[61974]: DEBUG nova.virt.hardware [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.094304] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ece6fb-3681-4ef5-aa0d-389ff919f702 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.102220] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7042af-cfac-482b-a01c-e4c3f12c7f73 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.122709] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 14a74bf6-712b-4b82-a24f-6367d5180c6a] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1151.221041] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.181s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.221650] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Start building networks asynchronously for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1151.374212] env[61974]: DEBUG nova.compute.manager [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Received event network-vif-plugged-919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1151.374472] env[61974]: DEBUG oslo_concurrency.lockutils [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] Acquiring lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.374648] env[61974]: DEBUG oslo_concurrency.lockutils [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.374818] env[61974]: DEBUG oslo_concurrency.lockutils [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.374993] env[61974]: DEBUG nova.compute.manager [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] No waiting events found dispatching network-vif-plugged-919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.375564] env[61974]: WARNING nova.compute.manager [req-bce53f93-44d6-4c3a-9d79-d73e5aab5f61 req-e0b7d4f1-6125-4989-8dd1-cb71425a1862 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Received unexpected event network-vif-plugged-919720aa-453c-436b-83f3-2f0181f8391e for instance with vm_state building and task_state spawning. 
[ 1151.393052] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.393283] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.393531] env[61974]: INFO nova.compute.manager [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attaching volume a1e868e9-b258-4c58-9a5a-001530b1b12a to /dev/sdc [ 1151.425616] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a33cf65-03a8-4a74-a768-2557188f0d96 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.432413] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b01781-995c-4cd7-a6ba-ca5004619b12 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.445263] env[61974]: DEBUG nova.virt.block_device [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating existing volume attachment record: 0451930d-9a88-4806-983e-beee0bc12346 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1151.469025] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Successfully updated port: 919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.625950] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 18559ea6-0cc4-4201-bafa-e63868753a06] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1151.726624] env[61974]: DEBUG nova.compute.utils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1151.728055] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1151.728658] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1151.767374] env[61974]: DEBUG nova.policy [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '453e22de6c0f478d93d6269ea122d660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61c671d85b64b28872586c2816b83f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1151.971786] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.971980] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.975079] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.044614] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Successfully created port: 46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.130506] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 33d2889a-7f80-4d65-8325-91355c9bcb46] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1152.230906] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1152.503051] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.624704] env[61974]: DEBUG nova.network.neutron [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updating instance_info_cache with network_info: [{"id": "919720aa-453c-436b-83f3-2f0181f8391e", "address": "fa:16:3e:0e:7a:19", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap919720aa-45", "ovs_interfaceid": "919720aa-453c-436b-83f3-2f0181f8391e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.636838] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 4d4f5746-5873-4933-8741-c07ca43c13cb] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1153.127235] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.127551] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Instance network_info: |[{"id": "919720aa-453c-436b-83f3-2f0181f8391e", "address": "fa:16:3e:0e:7a:19", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap919720aa-45", "ovs_interfaceid": "919720aa-453c-436b-83f3-2f0181f8391e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1153.128015] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:7a:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '919720aa-453c-436b-83f3-2f0181f8391e', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.135466] env[61974]: DEBUG oslo.service.loopingcall [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.135681] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.135913] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f686881-632e-48e7-8cda-56ac802b1364 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.150128] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 584ce365-9125-4c2a-9668-f921beb599e0] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1153.157404] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.157404] env[61974]: value = "task-1379582" [ 1153.157404] env[61974]: _type = "Task" [ 1153.157404] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.166507] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379582, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.239718] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1153.265049] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1153.265349] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.265514] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1153.265704] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.265856] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1153.266017] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1153.266252] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1153.266421] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1153.266590] env[61974]: DEBUG 
nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1153.266755] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1153.266933] env[61974]: DEBUG nova.virt.hardware [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1153.268260] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b205d8c3-8917-4635-9ca8-2b552fc6137f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.276022] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e74c6cf-2f85-42f6-bbc8-62d205f91e4b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.403398] env[61974]: DEBUG nova.compute.manager [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Received event network-changed-919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1153.403626] env[61974]: DEBUG nova.compute.manager [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Refreshing instance network info cache due to event network-changed-919720aa-453c-436b-83f3-2f0181f8391e. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1153.403878] env[61974]: DEBUG oslo_concurrency.lockutils [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] Acquiring lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.404258] env[61974]: DEBUG oslo_concurrency.lockutils [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] Acquired lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.404491] env[61974]: DEBUG nova.network.neutron [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Refreshing network info cache for port 919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.408437] env[61974]: DEBUG nova.compute.manager [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Received event network-vif-plugged-46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1153.408687] env[61974]: DEBUG oslo_concurrency.lockutils [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.408861] env[61974]: DEBUG oslo_concurrency.lockutils [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.409051] env[61974]: DEBUG oslo_concurrency.lockutils [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.409219] env[61974]: DEBUG nova.compute.manager [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] No waiting events found dispatching network-vif-plugged-46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1153.409395] env[61974]: WARNING nova.compute.manager [req-6876fb17-22fb-4426-876c-0007420c1834 req-53c06282-d611-42be-bcd6-7fcf907cf0aa service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Received unexpected event network-vif-plugged-46b5260c-16a6-4544-939f-c298e667769f for instance with vm_state building and task_state spawning. 
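The "<instance-uuid>-events" acquire/release pairs above are oslo.concurrency named locks taken around per-instance event bookkeeping before the "Received unexpected event" warning is emitted. A minimal sketch of that pattern follows; the function body and names are illustrative placeholders, not the actual nova.compute.manager.InstanceEvents code.

# Sketch of the named-lock pattern traced by the
# "Acquiring lock ... acquired ... released" entries above.
from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, event_name, waiters):
    # Serialize event bookkeeping per instance; uncontended acquires show
    # up in the log as waited/held times of ~0.000s.
    with lockutils.lock(f"{instance_uuid}-events"):
        # Return the registered waiter for this event, if any; when none
        # exists, the caller logs the "Received unexpected event ...
        # vm_state building and task_state spawning" warning seen above.
        return waiters.pop(event_name, None)
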
[ 1153.653223] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 8d71aaf0-e35c-4e6e-9094-d55b1544c3c8] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1153.667020] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379582, 'name': CreateVM_Task, 'duration_secs': 0.321234} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.667287] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.668133] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.668417] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.668849] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.669576] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdbabdf3-d6ae-4fda-a6c4-1a78e4ecdbcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.674463] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1153.674463] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5257b94e-46cc-2bcc-537c-4672832b9963" [ 1153.674463] env[61974]: _type = "Task" [ 1153.674463] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.683732] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5257b94e-46cc-2bcc-537c-4672832b9963, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.017048] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Successfully updated port: 46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1154.110159] env[61974]: DEBUG nova.network.neutron [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updated VIF entry in instance network info cache for port 919720aa-453c-436b-83f3-2f0181f8391e. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1154.110577] env[61974]: DEBUG nova.network.neutron [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updating instance_info_cache with network_info: [{"id": "919720aa-453c-436b-83f3-2f0181f8391e", "address": "fa:16:3e:0e:7a:19", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap919720aa-45", "ovs_interfaceid": "919720aa-453c-436b-83f3-2f0181f8391e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.157199] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ca8a238c-4b52-4016-8614-c2f8ad7891f7] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1154.188097] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5257b94e-46cc-2bcc-537c-4672832b9963, 'name': SearchDatastore_Task, 'duration_secs': 0.009405} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.188493] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.188811] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.189142] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.189361] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.189615] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.189939] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89bb49c6-ab94-4069-bf96-ae61eda042ec {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.198830] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.199079] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1154.200041] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b0484a3-977c-42c5-81f9-e7e1b6e70b22 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.206549] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1154.206549] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52f227eb-6589-7036-3b65-155e54944078" [ 1154.206549] env[61974]: _type = "Task" [ 1154.206549] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.216720] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f227eb-6589-7036-3b65-155e54944078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.523060] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.523220] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.523373] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.613105] env[61974]: DEBUG oslo_concurrency.lockutils [req-cddf603e-fbb8-4d07-bf16-e6c72d1d040f req-2d653ec1-d58a-42aa-bc38-69a0c9de6106 service nova] Releasing lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.659864] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: e3dc39a5-4e90-472d-8b62-fd17572852f7] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1154.716948] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52f227eb-6589-7036-3b65-155e54944078, 'name': SearchDatastore_Task, 'duration_secs': 0.00826} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.717743] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e3b6773-a790-4929-93b2-cf24f79113a8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.723746] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1154.723746] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]5232a1c2-ec8c-2218-97a2-13342ea6eb88" [ 1154.723746] env[61974]: _type = "Task" [ 1154.723746] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.731070] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5232a1c2-ec8c-2218-97a2-13342ea6eb88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.053892] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Instance cache missing network info. {{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1155.163194] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f475d963-0c09-4115-885a-04e28895df14] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1155.172866] env[61974]: DEBUG nova.network.neutron [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating instance_info_cache with network_info: [{"id": "46b5260c-16a6-4544-939f-c298e667769f", "address": "fa:16:3e:6e:31:3c", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b5260c-16", "ovs_interfaceid": "46b5260c-16a6-4544-939f-c298e667769f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1155.234447] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]5232a1c2-ec8c-2218-97a2-13342ea6eb88, 'name': SearchDatastore_Task, 'duration_secs': 0.008597} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.234447] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.234619] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea/4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1155.234826] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc7c70ce-1026-4295-b99e-e81437da8aa4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.241338] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1155.241338] env[61974]: value = "task-1379584" [ 1155.241338] env[61974]: _type = "Task" [ 1155.241338] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.248454] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.429084] env[61974]: DEBUG nova.compute.manager [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Received event network-changed-46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1155.429178] env[61974]: DEBUG nova.compute.manager [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Refreshing instance network info cache due to event network-changed-46b5260c-16a6-4544-939f-c298e667769f. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1155.429384] env[61974]: DEBUG oslo_concurrency.lockutils [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] Acquiring lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.666508] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: bdf59e9c-6ce8-4849-8f8c-02d3bf97ad63] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1155.675616] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.676163] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Instance network_info: |[{"id": "46b5260c-16a6-4544-939f-c298e667769f", "address": "fa:16:3e:6e:31:3c", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b5260c-16", "ovs_interfaceid": "46b5260c-16a6-4544-939f-c298e667769f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1155.676559] env[61974]: DEBUG oslo_concurrency.lockutils [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] Acquired lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.676766] env[61974]: DEBUG nova.network.neutron [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Refreshing network info cache for port 46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.679793] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: 
e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:31:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46b5260c-16a6-4544-939f-c298e667769f', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.686272] env[61974]: DEBUG oslo.service.loopingcall [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1155.687553] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1155.688090] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be79ec17-af94-4de8-ae48-6a1d12db6466 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.709425] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.709425] env[61974]: value = "task-1379585" [ 1155.709425] env[61974]: _type = "Task" [ 1155.709425] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.717106] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379585, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.752587] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.429948} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.752587] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea/4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1155.752840] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1155.753037] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-826a2b54-5e87-4574-a6d2-1b6b0cd6d4ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.759799] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1155.759799] env[61974]: value = "task-1379586" [ 1155.759799] env[61974]: _type = "Task" [ 1155.759799] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.767864] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379586, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.987743] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Volume attach. 
Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1155.988087] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293046', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'name': 'volume-a1e868e9-b258-4c58-9a5a-001530b1b12a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'serial': 'a1e868e9-b258-4c58-9a5a-001530b1b12a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1155.989091] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c051f9-9c32-4d2d-93b2-0b58ee158e2d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.005037] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3505a88a-5ebd-4451-be8f-42b164f9676c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.032169] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-a1e868e9-b258-4c58-9a5a-001530b1b12a/volume-a1e868e9-b258-4c58-9a5a-001530b1b12a.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.032450] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6a45c1a-5c23-4c71-9c81-6492060c4830 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.049613] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1156.049613] env[61974]: value = "task-1379587" [ 1156.049613] env[61974]: _type = "Task" [ 1156.049613] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.057137] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379587, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.170065] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 912ff104-9c97-4486-99c8-71a35180abb0] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1156.221603] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379585, 'name': CreateVM_Task, 'duration_secs': 0.334949} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.221764] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1156.222403] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.222564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.222880] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1156.223142] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05dc9706-5008-4618-85af-8a88c07ad41c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.228051] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1156.228051] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52423e04-c68a-26a1-db84-d51eee26df8d" [ 1156.228051] env[61974]: _type = "Task" [ 1156.228051] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.235564] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52423e04-c68a-26a1-db84-d51eee26df8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.268422] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379586, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0635} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.270426] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1156.271236] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2feabbda-f669-419b-aca5-47486ad12434 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.293166] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea/4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.295524] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13662f08-fc82-48de-99a1-d99bb4e9c608 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.315530] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1156.315530] env[61974]: value = "task-1379588" [ 1156.315530] env[61974]: _type = "Task" [ 1156.315530] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.324857] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379588, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.401793] env[61974]: DEBUG nova.network.neutron [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updated VIF entry in instance network info cache for port 46b5260c-16a6-4544-939f-c298e667769f. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1156.402216] env[61974]: DEBUG nova.network.neutron [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating instance_info_cache with network_info: [{"id": "46b5260c-16a6-4544-939f-c298e667769f", "address": "fa:16:3e:6e:31:3c", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b5260c-16", "ovs_interfaceid": "46b5260c-16a6-4544-939f-c298e667769f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.560548] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379587, 'name': ReconfigVM_Task, 'duration_secs': 0.414225} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.560835] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-a1e868e9-b258-4c58-9a5a-001530b1b12a/volume-a1e868e9-b258-4c58-9a5a-001530b1b12a.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.565579] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8819de79-a1d6-4c50-b39c-c6abb8bca483 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.581512] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1156.581512] env[61974]: value = "task-1379589" [ 1156.581512] env[61974]: _type = "Task" [ 1156.581512] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.591287] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379589, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.673749] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 6e698472-b4c0-45dc-869d-d51bbe00552c] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1156.737632] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52423e04-c68a-26a1-db84-d51eee26df8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009728} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.737946] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.738210] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1156.738461] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.738589] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.738847] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.739203] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70163133-de95-411b-a2a7-90bb02f5cdaa {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.747156] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.747339] env[61974]: 
DEBUG nova.virt.vmwareapi.vmops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1156.748015] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e468587a-0867-409c-b0bc-2696c59bb5f6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.753053] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1156.753053] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52858cf4-af23-cdb9-6b13-b55c901085e6" [ 1156.753053] env[61974]: _type = "Task" [ 1156.753053] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.760471] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52858cf4-af23-cdb9-6b13-b55c901085e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.825209] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379588, 'name': ReconfigVM_Task, 'duration_secs': 0.274002} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.825531] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea/4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.826168] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e68cb925-b6cb-457c-9daa-be0ea2fdfc26 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.832409] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1156.832409] env[61974]: value = "task-1379590" [ 1156.832409] env[61974]: _type = "Task" [ 1156.832409] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.839549] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379590, 'name': Rename_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.905394] env[61974]: DEBUG oslo_concurrency.lockutils [req-b1874bd5-a5a9-4815-ba41-64c0b3bfe78b req-f365e5fd-f2b2-4a21-87a0-5b501289e832 service nova] Releasing lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.092512] env[61974]: DEBUG oslo_vmware.api [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379589, 'name': ReconfigVM_Task, 'duration_secs': 0.137426} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.092897] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293046', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'name': 'volume-a1e868e9-b258-4c58-9a5a-001530b1b12a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'serial': 'a1e868e9-b258-4c58-9a5a-001530b1b12a'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1157.177430] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a2fbbc4a-92da-4917-a73e-a37a8980c62c] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1157.263027] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52858cf4-af23-cdb9-6b13-b55c901085e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008496} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.263799] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee8f7fd-db1e-4e2d-b3f6-24f2b0d1e3bc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.269219] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1157.269219] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52620180-660e-ddd2-cd26-9ea3f12b1ad3" [ 1157.269219] env[61974]: _type = "Task" [ 1157.269219] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.276618] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52620180-660e-ddd2-cd26-9ea3f12b1ad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.340939] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379590, 'name': Rename_Task, 'duration_secs': 0.186971} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.341177] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.341413] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48c26f12-cbe4-4b8e-8cc4-b5ed96893fa6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.347345] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1157.347345] env[61974]: value = "task-1379591" [ 1157.347345] env[61974]: _type = "Task" [ 1157.347345] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.354175] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.680500] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 603bcf2a-fc99-4ba4-b757-c37d93554870] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1157.780353] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52620180-660e-ddd2-cd26-9ea3f12b1ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.780646] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.781020] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] e9cbf858-fb9a-4445-b4b2-3aaf697e83ae/e9cbf858-fb9a-4445-b4b2-3aaf697e83ae.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1157.781313] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f685a41-ebe3-4bc4-9b5c-699af742c444 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.787501] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1157.787501] env[61974]: value = "task-1379592" [ 1157.787501] env[61974]: _type = "Task" [ 1157.787501] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.795026] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.857072] env[61974]: DEBUG oslo_vmware.api [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379591, 'name': PowerOnVM_Task, 'duration_secs': 0.439293} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.857259] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.857504] env[61974]: INFO nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Took 6.79 seconds to spawn the instance on the hypervisor. 
[ 1157.857731] env[61974]: DEBUG nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1157.858553] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5410fcc8-6376-4861-8624-e34055eb0a48 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.129659] env[61974]: DEBUG nova.objects.instance [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.185054] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ef17d87d-31ae-4d08-afba-157521e7d1e3] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1158.297452] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45748} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.297716] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] e9cbf858-fb9a-4445-b4b2-3aaf697e83ae/e9cbf858-fb9a-4445-b4b2-3aaf697e83ae.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1158.297922] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1158.298197] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-682eef3d-2814-4cac-b1f3-a869463eff25 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.304326] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1158.304326] env[61974]: value = "task-1379593" [ 1158.304326] env[61974]: _type = "Task" [ 1158.304326] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.311117] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.374384] env[61974]: INFO nova.compute.manager [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Took 12.34 seconds to build instance. [ 1158.633775] env[61974]: DEBUG oslo_concurrency.lockutils [None req-2e4cf39a-316c-48cd-aff2-83477f385cf4 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.240s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.687661] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1a04b388-8739-4b46-a8e1-cd79835bcf48] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1158.815175] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063931} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.815519] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1158.816635] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b17ef9-ce28-4634-9dfc-f4dd752f9085 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.841829] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] e9cbf858-fb9a-4445-b4b2-3aaf697e83ae/e9cbf858-fb9a-4445-b4b2-3aaf697e83ae.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1158.843098] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06b42c22-b4fb-49bc-811a-7e0e47f423e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.864111] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1158.864111] env[61974]: value = "task-1379594" [ 1158.864111] env[61974]: _type = "Task" [ 1158.864111] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.872370] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.877037] env[61974]: DEBUG oslo_concurrency.lockutils [None req-822b149f-de52-413b-8734-c63fb8061a40 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.855s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.960819] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.961284] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.180231] env[61974]: DEBUG nova.compute.manager [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Received event network-changed-919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1159.180231] env[61974]: DEBUG nova.compute.manager [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Refreshing instance network info cache due to event network-changed-919720aa-453c-436b-83f3-2f0181f8391e. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1159.180452] env[61974]: DEBUG oslo_concurrency.lockutils [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] Acquiring lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.180603] env[61974]: DEBUG oslo_concurrency.lockutils [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] Acquired lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.180766] env[61974]: DEBUG nova.network.neutron [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Refreshing network info cache for port 919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.189962] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 9c26e20b-dfc4-432c-a851-499dbea18f01] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1159.377642] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379594, 'name': ReconfigVM_Task, 'duration_secs': 0.245221} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.378130] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfigured VM instance instance-0000006c to attach disk [datastore2] e9cbf858-fb9a-4445-b4b2-3aaf697e83ae/e9cbf858-fb9a-4445-b4b2-3aaf697e83ae.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.379161] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08b98447-5327-4944-b2ad-71e5df286760 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.386640] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1159.386640] env[61974]: value = "task-1379595" [ 1159.386640] env[61974]: _type = "Task" [ 1159.386640] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.397139] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379595, 'name': Rename_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.464755] env[61974]: INFO nova.compute.manager [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Detaching volume 18a01aaf-194f-482b-8bbf-d6f159cf5f96 [ 1159.495936] env[61974]: INFO nova.virt.block_device [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attempting to driver detach volume 18a01aaf-194f-482b-8bbf-d6f159cf5f96 from mountpoint /dev/sdb [ 1159.496231] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Volume detach. Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1159.496403] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293045', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'name': 'volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'serial': '18a01aaf-194f-482b-8bbf-d6f159cf5f96'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1159.497325] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330ecdc2-dd90-44ff-a347-0e36b22ac5b1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.522353] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d4cea5-cd81-4da5-b1bc-9e44924cd569 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.529161] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ca5a75-252e-4962-847c-fc446c9ddc77 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.553708] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7140f431-fba9-40c6-b511-815d07605e1b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.568446] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] The volume has not been displaced from its original location: [datastore2] volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96/volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1159.573814] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1159.574141] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ec3b4a-5ff2-422e-85a0-64174f04d4d5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.593157] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1159.593157] env[61974]: value = "task-1379596" [ 1159.593157] env[61974]: _type = "Task" [ 1159.593157] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.599898] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379596, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.692399] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: e6feee04-8aae-4151-8187-3ef4885bcf73] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1159.896491] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379595, 'name': Rename_Task, 'duration_secs': 0.372147} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.896788] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1159.897064] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30ad3542-58dc-4268-9a1c-e47f6dfe8c74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.902788] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1159.902788] env[61974]: value = "task-1379597" [ 1159.902788] env[61974]: _type = "Task" [ 1159.902788] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.907200] env[61974]: DEBUG nova.network.neutron [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updated VIF entry in instance network info cache for port 919720aa-453c-436b-83f3-2f0181f8391e. {{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.907612] env[61974]: DEBUG nova.network.neutron [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updating instance_info_cache with network_info: [{"id": "919720aa-453c-436b-83f3-2f0181f8391e", "address": "fa:16:3e:0e:7a:19", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap919720aa-45", "ovs_interfaceid": "919720aa-453c-436b-83f3-2f0181f8391e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.911612] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.103247] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379596, 'name': ReconfigVM_Task, 'duration_secs': 0.277462} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.103613] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1160.109213] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c282e665-e9ce-49d9-bbb1-a9ff2ee70327 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.126240] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1160.126240] env[61974]: value = "task-1379598" [ 1160.126240] env[61974]: _type = "Task" [ 1160.126240] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.135739] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379598, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.196237] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 5780d1d6-cd40-4b97-8a68-072c090540af] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1160.415514] env[61974]: DEBUG oslo_concurrency.lockutils [req-1b7cca72-0a0a-4c31-95ef-d5058c51b579 req-6290292c-1757-4840-b6c7-fb31892827d5 service nova] Releasing lock "refresh_cache-4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.415930] env[61974]: DEBUG oslo_vmware.api [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379597, 'name': PowerOnVM_Task, 'duration_secs': 0.483276} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.416204] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1160.416418] env[61974]: INFO nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 1160.416597] env[61974]: DEBUG nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1160.417442] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab5d708-a168-4125-9ded-c4d3f04852ef {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.636543] env[61974]: DEBUG oslo_vmware.api [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379598, 'name': ReconfigVM_Task, 'duration_secs': 0.149825} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.636857] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293045', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'name': 'volume-18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': '18a01aaf-194f-482b-8bbf-d6f159cf5f96', 'serial': '18a01aaf-194f-482b-8bbf-d6f159cf5f96'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1160.699967] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: abe0168a-e838-468a-a223-7c2a64497c0c] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1160.938843] env[61974]: INFO nova.compute.manager [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Took 13.47 seconds to build instance. 
[ 1161.184534] env[61974]: DEBUG nova.objects.instance [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.202899] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 90f8acb1-a0b5-4459-a9d7-c12f652b0b51] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1161.441709] env[61974]: DEBUG oslo_concurrency.lockutils [None req-050d2d9d-9a49-49c0-99df-a619ad6cb94a tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.981s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.711022] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 68ad5903-e502-406b-a19e-9e4c28aa5035] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1161.803045] env[61974]: DEBUG nova.compute.manager [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Received event network-changed-46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1161.803309] env[61974]: DEBUG nova.compute.manager [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Refreshing instance network info cache due to event network-changed-46b5260c-16a6-4544-939f-c298e667769f. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1161.803469] env[61974]: DEBUG oslo_concurrency.lockutils [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] Acquiring lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.803612] env[61974]: DEBUG oslo_concurrency.lockutils [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] Acquired lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.803771] env[61974]: DEBUG nova.network.neutron [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Refreshing network info cache for port 46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.191808] env[61974]: DEBUG oslo_concurrency.lockutils [None req-6d70359b-c4e7-4ed4-bbd2-9fa154b7ae06 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.230s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.207204] env[61974]: DEBUG oslo_concurrency.lockutils [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.207623] env[61974]: DEBUG oslo_concurrency.lockutils [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.212698] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 450956aa-cc55-481c-acf6-287abc8b8efe] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1162.563909] env[61974]: DEBUG nova.network.neutron [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updated VIF entry in instance network info cache for port 46b5260c-16a6-4544-939f-c298e667769f. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.564306] env[61974]: DEBUG nova.network.neutron [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating instance_info_cache with network_info: [{"id": "46b5260c-16a6-4544-939f-c298e667769f", "address": "fa:16:3e:6e:31:3c", "network": {"id": "be36ebfc-3548-4420-b5b4-b3efb499516a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1190763400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61c671d85b64b28872586c2816b83f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46b5260c-16", "ovs_interfaceid": "46b5260c-16a6-4544-939f-c298e667769f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.711050] env[61974]: INFO nova.compute.manager [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Detaching volume a1e868e9-b258-4c58-9a5a-001530b1b12a [ 1162.714853] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: c06a7599-58e8-4796-9e95-d96327f649d0] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1162.745185] env[61974]: INFO nova.virt.block_device [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Attempting to driver detach volume a1e868e9-b258-4c58-9a5a-001530b1b12a from mountpoint /dev/sdc [ 1162.745502] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1162.745733] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293046', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'name': 'volume-a1e868e9-b258-4c58-9a5a-001530b1b12a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'serial': 'a1e868e9-b258-4c58-9a5a-001530b1b12a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1162.746728] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25596a71-cbb4-4a6b-9b65-0ab7d9da11fb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.769446] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e2a0f0-4a4a-4052-972c-4eaf146d72ca {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.776198] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a18958-8694-4241-bcc1-a01c2a9eaa74 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.797192] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff0a304-18c0-4cfc-9a04-38fdfef24e62 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.811174] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] The volume has not been displaced from its original location: [datastore2] volume-a1e868e9-b258-4c58-9a5a-001530b1b12a/volume-a1e868e9-b258-4c58-9a5a-001530b1b12a.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1162.816331] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.816562] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-052a6f32-cfd8-49a5-abea-81364185c5db {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.834759] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1162.834759] env[61974]: value = "task-1379600" [ 1162.834759] env[61974]: _type = "Task" [ 1162.834759] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.843980] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.067499] env[61974]: DEBUG oslo_concurrency.lockutils [req-242c4296-fad4-4f68-89bb-1b2115d11789 req-b6dc9632-37a3-4b19-ab5e-bd2c62f75c20 service nova] Releasing lock "refresh_cache-e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.220455] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a6cc4d1d-cdd6-4015-9b2f-b4b8ac3322ec] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1163.345300] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379600, 'name': ReconfigVM_Task, 'duration_secs': 0.305721} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.345557] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1163.350574] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdc78914-6df7-40e0-8269-5f953b809b1b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.366577] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1163.366577] env[61974]: value = "task-1379602" [ 1163.366577] env[61974]: _type = "Task" [ 1163.366577] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.375326] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379602, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.724559] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: eb6dfd21-0ba6-455c-b14e-80dacaf6b92c] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1163.876272] env[61974]: DEBUG oslo_vmware.api [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379602, 'name': ReconfigVM_Task, 'duration_secs': 0.146132} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.876576] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293046', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'name': 'volume-a1e868e9-b258-4c58-9a5a-001530b1b12a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '93409fd8-c9aa-427b-94b2-93f3db982786', 'attached_at': '', 'detached_at': '', 'volume_id': 'a1e868e9-b258-4c58-9a5a-001530b1b12a', 'serial': 'a1e868e9-b258-4c58-9a5a-001530b1b12a'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1164.228208] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ceb0dd02-6441-4923-99f6-73f8eab86fe5] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1164.420080] env[61974]: DEBUG nova.objects.instance [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'flavor' on Instance uuid 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.731427] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1e7e63dd-4d6c-43d3-881f-e96ac9e90bbb] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1165.236061] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 45fda940-b7f0-410c-b31a-b5cd365c28fe] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1165.428665] env[61974]: DEBUG oslo_concurrency.lockutils [None req-15ff379a-ff56-4b11-ba64-8df9d03cece1 tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.738739] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 097ad079-9712-4183-9135-b15ad3a65d6d] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1166.242839] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 0ce75511-290c-4fea-9657-dfdd8d9efc4b] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1166.607583] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.607888] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.608142] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.608346] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.608523] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.610774] env[61974]: INFO nova.compute.manager [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Terminating instance [ 1166.612667] env[61974]: DEBUG nova.compute.manager [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1166.612867] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1166.613732] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b26caa-4aa6-4cd4-9e82-a8d9f4005aa0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.621831] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1166.622392] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40150ab2-249d-4c56-a0c9-2b72b6565269 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.628239] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1166.628239] env[61974]: value = "task-1379604" [ 1166.628239] env[61974]: _type = "Task" [ 1166.628239] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.635604] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379604, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.746529] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 7b338210-5be8-4838-b815-8f2c6cc19ccd] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1167.138339] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379604, 'name': PowerOffVM_Task, 'duration_secs': 0.18185} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.138650] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.138833] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.139102] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef5662a0-ea34-4313-9e1b-b88fa9916f33 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.202051] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.202419] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.202419] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleting the datastore file [datastore2] 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.202714] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f737f964-ab06-4ec0-81f6-fde4423af243 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.209364] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for the task: (returnval){ [ 1167.209364] env[61974]: value = "task-1379607" [ 1167.209364] env[61974]: _type = "Task" [ 1167.209364] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.216879] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379607, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.250639] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: b1fa5433-8f26-48db-a19d-d1e11245fb44] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1167.719322] env[61974]: DEBUG oslo_vmware.api [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Task: {'id': task-1379607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133681} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.719545] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.719712] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1167.719889] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1167.720101] env[61974]: INFO nova.compute.manager [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1167.720354] env[61974]: DEBUG oslo.service.loopingcall [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.720557] env[61974]: DEBUG nova.compute.manager [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1167.720652] env[61974]: DEBUG nova.network.neutron [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1167.753958] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1aa2a63c-e352-4c9b-9445-9b45bf3ae14c] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1168.150163] env[61974]: DEBUG nova.compute.manager [req-622aa576-a2bc-4f54-9f7e-598ec5da32f2 req-d63ae3ac-9635-4342-becb-40504c5b0d76 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Received event network-vif-deleted-670c53d8-5b5e-412b-9af9-48b50c98a404 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1168.150293] env[61974]: INFO nova.compute.manager [req-622aa576-a2bc-4f54-9f7e-598ec5da32f2 req-d63ae3ac-9635-4342-becb-40504c5b0d76 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Neutron deleted interface 670c53d8-5b5e-412b-9af9-48b50c98a404; detaching it from the instance and deleting it from the info cache [ 1168.150412] env[61974]: DEBUG nova.network.neutron [req-622aa576-a2bc-4f54-9f7e-598ec5da32f2 req-d63ae3ac-9635-4342-becb-40504c5b0d76 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.257071] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 1c1404fd-a954-4849-883b-7898a7e87e2b] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1168.627041] env[61974]: DEBUG nova.network.neutron [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.652944] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17e6811e-9814-4af1-a610-04ac01b2338c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.662568] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87215044-6c75-4532-8948-9c1ffcd7b553 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.688229] env[61974]: DEBUG nova.compute.manager [req-622aa576-a2bc-4f54-9f7e-598ec5da32f2 req-d63ae3ac-9635-4342-becb-40504c5b0d76 service nova] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Detach interface failed, port_id=670c53d8-5b5e-412b-9af9-48b50c98a404, reason: Instance 93409fd8-c9aa-427b-94b2-93f3db982786 could not be found. 
{{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1168.760760] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f0601d26-4e29-4946-bb52-50e2a2163535] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1169.130129] env[61974]: INFO nova.compute.manager [-] [instance: 93409fd8-c9aa-427b-94b2-93f3db982786] Took 1.41 seconds to deallocate network for instance. [ 1169.264076] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: f88f0ef2-24f2-4eef-92a3-8de2ebb6944a] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1169.637358] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.637641] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1169.637870] env[61974]: DEBUG nova.objects.instance [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lazy-loading 'resources' on Instance uuid 93409fd8-c9aa-427b-94b2-93f3db982786 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.767556] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 635f362a-582e-44bc-85d8-8a69943982b0] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1170.201403] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7c91c9-0a06-4607-b57e-a2f6662feed3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.209460] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef37d9b-f034-467d-90c2-d13ad8c2d150 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.241172] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5b916d-b535-4256-9152-3cf15f715420 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.248646] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5894b0-9f8b-434f-a05b-79300d0c9923 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.261806] env[61974]: DEBUG nova.compute.provider_tree [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 
tempest-AttachVolumeTestJSON-1601427499-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.271132] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: a9edbd98-3e67-476b-934d-15d893a62d02] Instance has had 0 of 5 cleanup attempts {{(pid=61974) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1170.791388] env[61974]: DEBUG nova.scheduler.client.report [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 131 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1170.791781] env[61974]: DEBUG nova.compute.provider_tree [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 131 to 132 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1170.792061] env[61974]: DEBUG nova.compute.provider_tree [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1171.297266] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.315687] env[61974]: INFO nova.scheduler.client.report [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Deleted allocations for instance 93409fd8-c9aa-427b-94b2-93f3db982786 [ 1171.756598] env[61974]: DEBUG 
oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.824807] env[61974]: DEBUG oslo_concurrency.lockutils [None req-32dc5ac5-e04c-494f-8c04-26f4f04ab81e tempest-AttachVolumeTestJSON-1601427499 tempest-AttachVolumeTestJSON-1601427499-project-member] Lock "93409fd8-c9aa-427b-94b2-93f3db982786" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.217s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.260164] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.260510] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.260764] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.260997] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1172.262766] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8213291d-2271-4b90-82f9-29c3de3150f2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.271213] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35189069-5f1a-487e-8b8e-9a673264064d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.284757] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13082e50-0298-4175-920b-254685511b3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.290871] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8270e5-9580-4923-995f-02a34445c305 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.318967] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180712MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1172.319096] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.319281] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.348732] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1173.349014] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1173.349243] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e9cbf858-fb9a-4445-b4b2-3aaf697e83ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1173.349531] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1173.349770] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1173.403361] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f5f744-4853-4487-a8bd-b24e0ca11f40 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.413616] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b5f808-569e-42d2-8eb6-a0ea2ebe0745 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.462369] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0389870-a912-4ae0-84c6-dc4e3daf488d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.473221] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468b6de1-de2a-47dc-8f9b-8e9afd8e87ae {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.495472] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.999581] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1174.504971] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1174.504971] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.186s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.541068] env[61974]: DEBUG oslo_concurrency.lockutils [None 
req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.541390] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.043409] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Starting instance... {{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1179.564598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.564598] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.565922] env[61974]: INFO nova.compute.claims [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.637672] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb68cf0-bb13-44bf-be60-5359fa08a16b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.646155] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ba5fca-fe95-4877-b24e-d632d47e3470 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.687256] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6951322c-d84d-4593-b0bd-bd2f42c85bd9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.695333] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58605b43-c4d8-487b-8b79-1e19a8d6a8fc {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.708850] env[61974]: DEBUG 
nova.compute.provider_tree [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.212628] env[61974]: DEBUG nova.scheduler.client.report [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1181.717901] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.153s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.718335] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1182.222896] env[61974]: DEBUG nova.compute.utils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1182.224366] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Allocating IP information in the background. 
{{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1182.225240] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.291678] env[61974]: DEBUG nova.policy [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4896588cebd84071a573046de7006429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db6af28263c40708c2466226ce03009', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1182.552235] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Successfully created port: 6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.731198] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1183.235252] env[61974]: INFO nova.virt.block_device [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Booting with volume e6a79d42-7c85-4a6f-a881-fb861c033343 at /dev/sda [ 1183.270275] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a6d5fa7-f01a-4cf9-9919-2b048566826d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.280009] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620ad5fe-2c57-4410-9d21-c1bf41839bc1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.307384] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32308f3e-d7d7-42ae-8f43-7bb4ca48e78d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.315585] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9807547b-aba0-4473-9eae-11e1fb494293 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.341708] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b309acc-e948-4973-befb-590806807827 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.348539] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17e0817-d886-4b3f-8190-51320bdf3247 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.362841] env[61974]: DEBUG nova.virt.block_device [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating existing volume attachment record: 3cd805fc-2203-4005-8344-7b785584fd03 {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1183.927597] env[61974]: DEBUG nova.compute.manager [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Received event network-vif-plugged-6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1183.928895] env[61974]: DEBUG oslo_concurrency.lockutils [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.928895] env[61974]: DEBUG oslo_concurrency.lockutils [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.928895] env[61974]: DEBUG oslo_concurrency.lockutils [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.928895] env[61974]: DEBUG nova.compute.manager [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] No waiting events found dispatching network-vif-plugged-6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1183.929959] env[61974]: WARNING nova.compute.manager [req-eb4532f6-a33c-498f-be30-441c7a988148 req-6503c31c-6156-44b1-8a40-324dc49a4ed0 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Received unexpected event network-vif-plugged-6256620b-c9e6-4953-98c3-414683e8df1a for instance with vm_state building and task_state block_device_mapping. [ 1184.010379] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Successfully updated port: 6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.512675] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.512838] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.512981] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.044853] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.169615] env[61974]: DEBUG nova.network.neutron [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.444584] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Start spawning the instance on the hypervisor. 
{{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1185.445172] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1185.445398] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1185.445595] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1185.445789] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1185.445940] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1185.446105] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1185.446325] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1185.446499] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1185.446679] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies 
{{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1185.446849] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1185.447037] env[61974]: DEBUG nova.virt.hardware [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1185.447905] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95355d68-60a1-4565-ae23-7806d25bb486 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.456350] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42a2caf-e17e-424b-8473-6242b8a8aead {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.672912] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.673280] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Instance network_info: |[{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1185.673747] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:68:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6256620b-c9e6-4953-98c3-414683e8df1a', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.681339] env[61974]: DEBUG oslo.service.loopingcall [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1185.681559] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.681789] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b6cdab4-2bc6-404c-b1f9-6c9e40a729f3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.702049] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.702049] env[61974]: value = "task-1379615" [ 1185.702049] env[61974]: _type = "Task" [ 1185.702049] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.714494] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379615, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.954242] env[61974]: DEBUG nova.compute.manager [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Received event network-changed-6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1185.954428] env[61974]: DEBUG nova.compute.manager [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Refreshing instance network info cache due to event network-changed-6256620b-c9e6-4953-98c3-414683e8df1a. 
{{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1185.954697] env[61974]: DEBUG oslo_concurrency.lockutils [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.954871] env[61974]: DEBUG oslo_concurrency.lockutils [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.955085] env[61974]: DEBUG nova.network.neutron [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Refreshing network info cache for port 6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.212098] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379615, 'name': CreateVM_Task, 'duration_secs': 0.394276} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.212442] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1186.212923] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '3cd805fc-2203-4005-8344-7b785584fd03', 'delete_on_termination': True, 'device_type': None, 'mount_device': '/dev/sda', 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293050', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'name': 'volume-e6a79d42-7c85-4a6f-a881-fb861c033343', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed436bec-b992-45ae-8d17-445a594b1e68', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'serial': 'e6a79d42-7c85-4a6f-a881-fb861c033343'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61974) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1186.213164] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Root volume attach. 
Driver type: vmdk {{(pid=61974) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1186.213924] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b867f212-c6a9-4eb2-bccb-077e281638c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.221476] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad4d34b-ea41-4994-aa53-89104e742b10 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.227511] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e424714-5910-4bd5-8cf1-db7d43330bc5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.233010] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-6bb0cb54-2713-4c72-8d19-7f8cc04299dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.239276] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1186.239276] env[61974]: value = "task-1379616" [ 1186.239276] env[61974]: _type = "Task" [ 1186.239276] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.246295] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.702325] env[61974]: DEBUG nova.network.neutron [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updated VIF entry in instance network info cache for port 6256620b-c9e6-4953-98c3-414683e8df1a. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.702773] env[61974]: DEBUG nova.network.neutron [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.749991] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 42%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.205515] env[61974]: DEBUG oslo_concurrency.lockutils [req-1ec92478-d63b-4297-9551-d5e9dbe3d871 req-9289e344-3988-47bc-a875-41d793b704af service nova] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.249846] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 58%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.751128] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 73%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.251804] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 88%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.753461] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task} progress is 97%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.253120] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379616, 'name': RelocateVM_Task, 'duration_secs': 2.851212} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.253120] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Volume attach. Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1189.253450] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293050', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'name': 'volume-e6a79d42-7c85-4a6f-a881-fb861c033343', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed436bec-b992-45ae-8d17-445a594b1e68', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'serial': 'e6a79d42-7c85-4a6f-a881-fb861c033343'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1189.254154] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616420fa-c25e-47b2-bb82-c9f6a1723fa2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.270371] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dc622a-03a4-47af-9d83-44a5a5e91027 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.290924] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-e6a79d42-7c85-4a6f-a881-fb861c033343/volume-e6a79d42-7c85-4a6f-a881-fb861c033343.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.291166] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92bf4972-3ccc-4a68-9aaf-e8aefd3fb517 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.310296] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd 
tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1189.310296] env[61974]: value = "task-1379617" [ 1189.310296] env[61974]: _type = "Task" [ 1189.310296] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.317264] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379617, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.820579] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379617, 'name': ReconfigVM_Task, 'duration_secs': 0.292603} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.820865] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-e6a79d42-7c85-4a6f-a881-fb861c033343/volume-e6a79d42-7c85-4a6f-a881-fb861c033343.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.825478] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a692636-68a6-4c7f-ad10-3af1af14f4c5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.840881] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1189.840881] env[61974]: value = "task-1379618" [ 1189.840881] env[61974]: _type = "Task" [ 1189.840881] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.848929] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379618, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.352621] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379618, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.852506] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379618, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.353097] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379618, 'name': ReconfigVM_Task, 'duration_secs': 1.14623} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.353435] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293050', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'name': 'volume-e6a79d42-7c85-4a6f-a881-fb861c033343', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ed436bec-b992-45ae-8d17-445a594b1e68', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'serial': 'e6a79d42-7c85-4a6f-a881-fb861c033343'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1191.353959] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95d19d8c-e449-4ad8-8470-bf1cecc272be {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.360382] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1191.360382] env[61974]: value = "task-1379619" [ 1191.360382] env[61974]: _type = "Task" [ 1191.360382] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.367752] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379619, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.871236] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379619, 'name': Rename_Task, 'duration_secs': 0.133912} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.871556] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.871811] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bced8938-fd17-4368-80d1-3548ffe099b2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.878537] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1191.878537] env[61974]: value = "task-1379620" [ 1191.878537] env[61974]: _type = "Task" [ 1191.878537] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.886331] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.388664] env[61974]: DEBUG oslo_vmware.api [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379620, 'name': PowerOnVM_Task, 'duration_secs': 0.457247} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.388988] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.389261] env[61974]: INFO nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Took 6.94 seconds to spawn the instance on the hypervisor. [ 1192.389468] env[61974]: DEBUG nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1192.390235] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b2f11-ba6e-4567-8de9-90603c34c8d6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.909320] env[61974]: INFO nova.compute.manager [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Took 13.36 seconds to build instance. 
[ 1193.301968] env[61974]: DEBUG nova.compute.manager [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1193.302110] env[61974]: DEBUG nova.compute.manager [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing instance network info cache due to event network-changed-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1193.303262] env[61974]: DEBUG oslo_concurrency.lockutils [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.304292] env[61974]: DEBUG oslo_concurrency.lockutils [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.304292] env[61974]: DEBUG nova.network.neutron [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Refreshing network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.411937] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e8579b91-1210-4ab0-a830-83b2497ce5bd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.870s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.064222] env[61974]: DEBUG nova.network.neutron [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updated VIF entry in instance network info cache for port 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1194.064684] env[61974]: DEBUG nova.network.neutron [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.571501] env[61974]: DEBUG oslo_concurrency.lockutils [req-dae2c08e-55da-412c-b1f5-b6ed66af8316 req-900f1317-e175-4ceb-afcd-be5bdb72191c service nova] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1194.700778] env[61974]: DEBUG nova.compute.manager [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Stashing vm_state: active {{(pid=61974) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1195.220459] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.220727] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.326562] env[61974]: DEBUG nova.compute.manager [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Received event network-changed-6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1195.326765] env[61974]: DEBUG nova.compute.manager 
[req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Refreshing instance network info cache due to event network-changed-6256620b-c9e6-4953-98c3-414683e8df1a. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1195.326954] env[61974]: DEBUG oslo_concurrency.lockutils [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.327141] env[61974]: DEBUG oslo_concurrency.lockutils [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.327319] env[61974]: DEBUG nova.network.neutron [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Refreshing network info cache for port 6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.725754] env[61974]: INFO nova.compute.claims [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1196.043310] env[61974]: DEBUG nova.network.neutron [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updated VIF entry in instance network info cache for port 6256620b-c9e6-4953-98c3-414683e8df1a. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.043715] env[61974]: DEBUG nova.network.neutron [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.231909] env[61974]: INFO nova.compute.resource_tracker [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating resource usage from migration 662d2b2b-5515-402f-a0c3-1adc5eef3a98 [ 1196.299507] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2990a1-c61c-4034-a491-bfa5780317c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.306758] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bac93ef-686a-4338-83a1-330b5732c4d8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.336414] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97826beb-72af-4476-a98d-75c7c57d88c8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.342833] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a084ca3-f933-4c0e-a56f-b895e7666cc6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.355177] env[61974]: DEBUG nova.compute.provider_tree [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.546203] env[61974]: DEBUG 
oslo_concurrency.lockutils [req-c98c334a-3891-4507-8f5d-93a29b183cd4 req-7a5d6174-4408-43ee-a5b7-b7c19235bb54 service nova] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.858348] env[61974]: DEBUG nova.scheduler.client.report [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1197.364057] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.143s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.364057] env[61974]: INFO nova.compute.manager [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Migrating [ 1197.877923] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.878336] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.878336] env[61974]: DEBUG nova.network.neutron [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1198.574784] env[61974]: DEBUG nova.network.neutron [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.077436] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.217941] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1200.218219] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.593337] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4fec80-575f-443a-b6a7-a42f20a2517c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.611433] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 0 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.721884] env[61974]: DEBUG nova.compute.utils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1200.912380] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_power_states {{(pid=61974) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.117674] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.117986] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af9c4765-1cc3-45bd-8a84-cf112e12676b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.124988] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1201.124988] env[61974]: value = "task-1379621" [ 1201.124988] env[61974]: _type = "Task" [ 1201.124988] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.134132] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.224619] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.416731] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Getting list of instances from cluster (obj){ [ 1201.416731] env[61974]: value = "domain-c8" [ 1201.416731] env[61974]: _type = "ClusterComputeResource" [ 1201.416731] env[61974]: } {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1201.417839] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215d77d5-7bcc-4a77-91a9-143feca889cf {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.431443] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Got total of 4 instances {{(pid=61974) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1201.431654] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid 59c72be0-46de-4cb8-93d6-0a2c70c90e2e {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1201.431854] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1201.432052] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid 
e9cbf858-fb9a-4445-b4b2-3aaf697e83ae {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1201.432179] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Triggering sync for uuid ed436bec-b992-45ae-8d17-445a594b1e68 {{(pid=61974) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1201.432500] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.432760] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.433046] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.433241] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.433482] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.433667] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.433894] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.434087] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.434255] env[61974]: INFO nova.compute.manager [None 
req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 1201.434419] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.435217] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3e349b-4e58-4c61-889d-11e67a02d44d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.438017] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc459e1d-de37-426f-97a9-0985cc9442b7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.441945] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95b4fa7-e512-47b8-b6cb-7be4830936dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.634390] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379621, 'name': PowerOffVM_Task, 'duration_secs': 0.174077} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.634672] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.634864] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 17 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.954791] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.954956] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.521s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.957397] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.524s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.101881] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.141251] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.141518] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.141731] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.141886] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.142052] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.142208] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.142415] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.142615] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b 
tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.142803] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.142974] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.143170] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.148087] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-266297b4-d1dd-477a-a9e7-12b7cde85277 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.164069] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1202.164069] env[61974]: value = "task-1379622" [ 1202.164069] env[61974]: _type = "Task" [ 1202.164069] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.171905] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379622, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.284120] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.284478] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.284610] env[61974]: INFO nova.compute.manager [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Attaching volume 086be21d-8ac2-4c89-abc9-87ae48dfd167 to /dev/sdb [ 1202.314421] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea82bc2-2a1d-4a07-82b3-96598043e8e6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.321225] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3648363b-655a-4254-9f10-b371003bc524 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.333803] env[61974]: DEBUG nova.virt.block_device [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating existing volume attachment record: 96da0ddc-5ac0-4503-9487-52921fa10c6c {{(pid=61974) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1202.674600] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379622, 'name': ReconfigVM_Task, 'duration_secs': 0.117083} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.674895] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 33 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.181592] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.181844] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.182023] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.182216] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.182367] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.182525] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.182725] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.182886] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b 
tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.183072] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.183247] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.183425] env[61974]: DEBUG nova.virt.hardware [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.188623] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1203.188918] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1cab091-61e8-4113-becd-d3bc70a5c36a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.206747] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1203.206747] env[61974]: value = "task-1379624" [ 1203.206747] env[61974]: _type = "Task" [ 1203.206747] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.214262] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379624, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.716184] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379624, 'name': ReconfigVM_Task, 'duration_secs': 0.150464} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.716533] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1203.717253] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c39282-67a1-4e67-a8aa-8c676f1f936f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.739238] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-e6a79d42-7c85-4a6f-a881-fb861c033343/volume-e6a79d42-7c85-4a6f-a881-fb861c033343.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.739506] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf36bcce-d510-4c2c-b236-eff2b9a3b77f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.758890] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1203.758890] env[61974]: value = "task-1379625" [ 1203.758890] env[61974]: _type = "Task" [ 1203.758890] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.766280] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379625, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.268981] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379625, 'name': ReconfigVM_Task, 'duration_secs': 0.28433} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.269249] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-e6a79d42-7c85-4a6f-a881-fb861c033343/volume-e6a79d42-7c85-4a6f-a881-fb861c033343.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.269529] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 50 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1204.574937] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.776379] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38187aa-09ea-4c73-9e27-a046d32a73f7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.794603] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74d656a-e924-42bf-b2a8-da1ee535d7e4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.811419] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 67 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1206.437705] env[61974]: DEBUG nova.network.neutron [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Port 6256620b-c9e6-4953-98c3-414683e8df1a binding to destination host cpu-1 is already ACTIVE {{(pid=61974) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1206.579880] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.580067] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1206.580194] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Rebuilding the list of instances to heal {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1206.879111] 
env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Volume attach. Driver type: vmdk {{(pid=61974) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1206.879379] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293052', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'name': 'volume-086be21d-8ac2-4c89-abc9-87ae48dfd167', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e9cbf858-fb9a-4445-b4b2-3aaf697e83ae', 'attached_at': '', 'detached_at': '', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'serial': '086be21d-8ac2-4c89-abc9-87ae48dfd167'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1206.880327] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab3792b-12a0-4a75-b2af-4077681f7081 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.897799] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f45d623-e716-4a6f-a145-ae0ba1fa803b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.922380] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-086be21d-8ac2-4c89-abc9-87ae48dfd167/volume-086be21d-8ac2-4c89-abc9-87ae48dfd167.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.922658] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2d13e00-2ed1-4708-8ea9-ccb166bde8ff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.944364] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1206.944364] env[61974]: value = "task-1379627" [ 1206.944364] env[61974]: _type = "Task" [ 1206.944364] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.952481] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379627, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.110229] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.110412] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquired lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.110571] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Forcefully refreshing network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1207.110730] env[61974]: DEBUG nova.objects.instance [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lazy-loading 'info_cache' on Instance uuid 59c72be0-46de-4cb8-93d6-0a2c70c90e2e {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.462507] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.462899] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.462899] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.468989] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379627, 'name': ReconfigVM_Task, 'duration_secs': 0.522901} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.469414] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-086be21d-8ac2-4c89-abc9-87ae48dfd167/volume-086be21d-8ac2-4c89-abc9-87ae48dfd167.vmdk or device None with type thin {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.474760] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c29961ff-10c8-49a1-94a8-aefce82b84e1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.489382] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1207.489382] env[61974]: value = "task-1379628" [ 1207.489382] env[61974]: _type = "Task" [ 1207.489382] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.497380] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379628, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.999252] env[61974]: DEBUG oslo_vmware.api [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379628, 'name': ReconfigVM_Task, 'duration_secs': 0.126915} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.999565] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293052', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'name': 'volume-086be21d-8ac2-4c89-abc9-87ae48dfd167', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e9cbf858-fb9a-4445-b4b2-3aaf697e83ae', 'attached_at': '', 'detached_at': '', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'serial': '086be21d-8ac2-4c89-abc9-87ae48dfd167'} {{(pid=61974) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1208.497016] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.497264] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.497445] env[61974]: DEBUG nova.network.neutron [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1208.804788] env[61974]: DEBUG nova.network.neutron [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [{"id": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "address": "fa:16:3e:1d:de:e9", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ebf62dc-0f", "ovs_interfaceid": "2ebf62dc-0f02-4b1b-bd8f-adc0186ae753", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1209.035153] env[61974]: DEBUG nova.objects.instance [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid e9cbf858-fb9a-4445-b4b2-3aaf697e83ae {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.257806] env[61974]: DEBUG nova.network.neutron [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.307139] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Releasing lock "refresh_cache-59c72be0-46de-4cb8-93d6-0a2c70c90e2e" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.307326] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updated the network info_cache for instance {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1209.307517] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.307681] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.307829] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.307975] env[61974]: 
DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.308118] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1209.539646] env[61974]: DEBUG oslo_concurrency.lockutils [None req-fa453569-fdf8-4bab-aa57-057b363f4fb4 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.255s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.580209] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.710897] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.711192] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.760316] env[61974]: DEBUG oslo_concurrency.lockutils [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.214223] env[61974]: INFO nova.compute.manager [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Detaching volume 086be21d-8ac2-4c89-abc9-87ae48dfd167 [ 1210.248111] env[61974]: INFO nova.virt.block_device [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Attempting to driver detach volume 086be21d-8ac2-4c89-abc9-87ae48dfd167 from mountpoint /dev/sdb [ 1210.248366] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1210.248557] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293052', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'name': 'volume-086be21d-8ac2-4c89-abc9-87ae48dfd167', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e9cbf858-fb9a-4445-b4b2-3aaf697e83ae', 'attached_at': '', 'detached_at': '', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'serial': '086be21d-8ac2-4c89-abc9-87ae48dfd167'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1210.249529] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee2b515-82f6-40f9-9b28-038912120ff0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.274055] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dbf5a3-ecff-4c9d-9802-f7e69469510d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.277340] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5529ef50-a71a-4257-8711-74775ffd2d1c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.286098] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdad7a16-c2bd-4189-96e4-e22456625477 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.288845] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405cd58e-7695-4d94-93f6-ae76d034f5f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.311271] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84185880-1204-4424-ba0c-1d0388bd7c0f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.327411] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] The volume has not been displaced from its original location: [datastore2] volume-086be21d-8ac2-4c89-abc9-87ae48dfd167/volume-086be21d-8ac2-4c89-abc9-87ae48dfd167.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1210.332677] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1210.332961] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa149d29-632e-41a4-ad9d-410de76bcba9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.351100] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1210.351100] env[61974]: value = "task-1379629" [ 1210.351100] env[61974]: _type = "Task" [ 1210.351100] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.358491] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.863533] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379629, 'name': ReconfigVM_Task, 'duration_secs': 0.219923} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.863817] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1210.868417] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bddd0c1-cd14-478f-b9a1-939f083cd71c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.886285] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1210.886285] env[61974]: value = "task-1379630" [ 1210.886285] env[61974]: _type = "Task" [ 1210.886285] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.893808] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379630, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.395254] env[61974]: DEBUG oslo_vmware.api [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379630, 'name': ReconfigVM_Task, 'duration_secs': 0.131156} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.395559] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293052', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'name': 'volume-086be21d-8ac2-4c89-abc9-87ae48dfd167', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e9cbf858-fb9a-4445-b4b2-3aaf697e83ae', 'attached_at': '', 'detached_at': '', 'volume_id': '086be21d-8ac2-4c89-abc9-87ae48dfd167', 'serial': '086be21d-8ac2-4c89-abc9-87ae48dfd167'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1211.398786] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3aba07-9c8d-4c3f-bdad-c3cf9d578569 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.417296] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a554a74-77c8-4a12-8c52-139982d37363 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.423759] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 83 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1211.580124] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.930297] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.930911] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65e18509-2fbe-467d-a78d-e502e3c7eb27 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.938771] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1211.938771] env[61974]: value = "task-1379631" [ 1211.938771] 
env[61974]: _type = "Task" [ 1211.938771] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.944788] env[61974]: DEBUG nova.objects.instance [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'flavor' on Instance uuid e9cbf858-fb9a-4445-b4b2-3aaf697e83ae {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.950695] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.082911] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.083782] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.083782] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.083782] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1212.084545] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acab291-8a9b-461a-a7df-95592cedce29 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.093161] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9862cbf-3a0f-47da-8aee-743a7ca7fc8c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.106354] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ddb492-45bc-481b-b55d-925ef502d366 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.112807] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd49791-ad2f-4a28-8103-ae87a6f78bdd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.141919] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=180831MB free_disk=177GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1212.142086] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.142287] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.450519] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379631, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.949684] env[61974]: DEBUG oslo_vmware.api [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379631, 'name': PowerOnVM_Task, 'duration_secs': 0.538866} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.950198] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.950198] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-929d8328-60be-4851-9ce6-ea4ae12c729b tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance 'ed436bec-b992-45ae-8d17-445a594b1e68' progress to 100 {{(pid=61974) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1212.953662] env[61974]: DEBUG oslo_concurrency.lockutils [None req-9aa8ed53-27b1-4341-bc9f-7086928d3438 tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.242s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.149866] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Applying migration context for instance ed436bec-b992-45ae-8d17-445a594b1e68 as it has an incoming, in-progress migration 662d2b2b-5515-402f-a0c3-1adc5eef3a98. 
Migration status is post-migrating {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1213.150655] env[61974]: INFO nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating resource usage from migration 662d2b2b-5515-402f-a0c3-1adc5eef3a98 [ 1213.166297] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1213.166439] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1213.166559] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance e9cbf858-fb9a-4445-b4b2-3aaf697e83ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1213.166679] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Migration 662d2b2b-5515-402f-a0c3-1adc5eef3a98 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1213.166796] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Instance ed436bec-b992-45ae-8d17-445a594b1e68 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61974) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1213.166964] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1213.167112] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1213.226522] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae3c3db-375f-46f5-94ba-171c24843c15 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.233736] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df903190-0c19-4923-8814-e29f014506e8 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.262418] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e5f294-c075-438e-b69f-1360e384fda4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.269048] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cf541d-2f51-483c-b175-f41cb1898b55 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.281718] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.784894] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1213.943021] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.943299] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.943525] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.943721] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.943901] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.946085] env[61974]: INFO nova.compute.manager [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Terminating instance [ 1213.947787] env[61974]: DEBUG nova.compute.manager [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Start destroying the instance on the hypervisor. 
{{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1213.947982] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.948820] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6921c8-700e-441f-966f-e4aab5e08ac6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.956017] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.956288] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb64c641-e389-4ce8-a737-7be9547831e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.962573] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1213.962573] env[61974]: value = "task-1379632" [ 1213.962573] env[61974]: _type = "Task" [ 1213.962573] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.969423] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379632, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.289125] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1214.289367] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.147s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.472861] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379632, 'name': PowerOffVM_Task, 'duration_secs': 0.16913} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.473173] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.473391] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.473570] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97906225-f2f1-473d-878c-686ae5c01567 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.536716] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.536981] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.537160] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleting the datastore file [datastore2] e9cbf858-fb9a-4445-b4b2-3aaf697e83ae {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.537429] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d08a020b-4d8d-4d28-9905-2afec4f6cf3e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.544402] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for the task: (returnval){ [ 1214.544402] env[61974]: value = "task-1379634" [ 1214.544402] env[61974]: _type = "Task" [ 1214.544402] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.552470] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379634, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.055125] env[61974]: DEBUG oslo_vmware.api [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Task: {'id': task-1379634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142791} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.055125] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.055125] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.055563] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.055563] env[61974]: INFO nova.compute.manager [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1215.055698] env[61974]: DEBUG oslo.service.loopingcall [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1215.055910] env[61974]: DEBUG nova.compute.manager [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1215.055991] env[61974]: DEBUG nova.network.neutron [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.255147] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.255435] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.255645] env[61974]: DEBUG nova.compute.manager [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Going to confirm migration 4 {{(pid=61974) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1215.507276] env[61974]: DEBUG nova.compute.manager [req-f99865e3-e535-4e11-a04c-e53abcb6713a req-13acbded-cd2e-4f2e-a14c-d129985077cc service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Received event network-vif-deleted-46b5260c-16a6-4544-939f-c298e667769f {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1215.507466] env[61974]: INFO nova.compute.manager [req-f99865e3-e535-4e11-a04c-e53abcb6713a req-13acbded-cd2e-4f2e-a14c-d129985077cc service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Neutron deleted interface 46b5260c-16a6-4544-939f-c298e667769f; detaching it from the instance and deleting it from the info cache [ 1215.507639] env[61974]: DEBUG nova.network.neutron [req-f99865e3-e535-4e11-a04c-e53abcb6713a req-13acbded-cd2e-4f2e-a14c-d129985077cc service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.814269] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1215.814450] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquired lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.814655] env[61974]: DEBUG nova.network.neutron [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1215.814846] env[61974]: DEBUG nova.objects.instance [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'info_cache' on Instance uuid ed436bec-b992-45ae-8d17-445a594b1e68 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.950196] env[61974]: DEBUG nova.network.neutron [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.010697] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b3a8331-94f4-4906-8f93-840ddffb8e1f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.021585] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3211b7a3-141d-48c1-a6f0-12c8a6fe8c06 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.047502] env[61974]: DEBUG nova.compute.manager [req-f99865e3-e535-4e11-a04c-e53abcb6713a req-13acbded-cd2e-4f2e-a14c-d129985077cc service nova] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Detach interface failed, port_id=46b5260c-16a6-4544-939f-c298e667769f, reason: Instance e9cbf858-fb9a-4445-b4b2-3aaf697e83ae could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1216.453256] env[61974]: INFO nova.compute.manager [-] [instance: e9cbf858-fb9a-4445-b4b2-3aaf697e83ae] Took 1.40 seconds to deallocate network for instance. 
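[editor's note] The entries above show the vmwareapi driver submitting a DeleteDatastoreFile_Task and then polling it until it finishes (wait_for_task / _poll_task, "progress is 0%", then "completed successfully" with a duration_secs). The snippet below is a minimal, generic sketch of that poll-until-done loop, not the oslo.vmware implementation: the fetch_state callable and its state names are assumptions made purely for illustration (the real driver reads task state through the vSphere PropertyCollector).

```python
import time


def wait_for_task(fetch_state, interval=0.5, timeout=300.0):
    """Poll a long-running task until it succeeds, fails, or times out.

    ``fetch_state`` is an assumed callable returning a dict like
    {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_state()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in %.0fs' % timeout)
        # DEBUG lines such as "progress is 0%" correspond to this step.
        print('progress is %d%%' % info.get('progress', 0))
        time.sleep(interval)


if __name__ == '__main__':
    # Stubbed task that "completes" on the third poll.
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 50},
                   {'state': 'success', 'progress': 100}])
    print(wait_for_task(lambda: next(states), interval=0.01))
```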
[ 1216.959753] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.960264] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.960350] env[61974]: DEBUG nova.objects.instance [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lazy-loading 'resources' on Instance uuid e9cbf858-fb9a-4445-b4b2-3aaf697e83ae {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.071052] env[61974]: DEBUG nova.network.neutron [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [{"id": "6256620b-c9e6-4953-98c3-414683e8df1a", "address": "fa:16:3e:dc:68:0c", "network": {"id": "83702475-a8b9-4e48-85cc-82485372c451", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1258194037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db6af28263c40708c2466226ce03009", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6256620b-c9", "ovs_interfaceid": "6256620b-c9e6-4953-98c3-414683e8df1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.531357] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb876f2f-57c3-4dda-81c1-ec318eb10360 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.538975] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4d87a3-3a49-4b83-951e-c7f4a725a19b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.569016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b4783e94-1ae7-4a5a-8ecb-4f36beec10e9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.575484] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Releasing lock "refresh_cache-ed436bec-b992-45ae-8d17-445a594b1e68" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1217.575730] env[61974]: DEBUG nova.objects.instance [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'migration_context' on Instance uuid ed436bec-b992-45ae-8d17-445a594b1e68 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.578057] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1ed01e-a96b-4993-9e14-0ff0d160074e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.590955] env[61974]: DEBUG nova.compute.provider_tree [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1218.083214] env[61974]: DEBUG nova.objects.base [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61974) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1218.084629] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5d4dd7-a4e6-4984-98e0-539ecacff0c3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.106569] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-319ec8e7-9217-4510-b3ec-909f0ad5c31b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.111384] env[61974]: DEBUG oslo_vmware.api [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1218.111384] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52dfdd64-aef8-dbb7-50cd-4c3f47208409" [ 1218.111384] env[61974]: _type = "Task" [ 1218.111384] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.119451] env[61974]: DEBUG oslo_vmware.api [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dfdd64-aef8-dbb7-50cd-4c3f47208409, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.126893] env[61974]: ERROR nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] [req-97c58476-1b9d-4f56-87b1-fc80e12d7c16] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 81f72dd1-35ef-4b87-b120-a6ea5ab8608a. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-97c58476-1b9d-4f56-87b1-fc80e12d7c16"}]} [ 1218.147862] env[61974]: DEBUG nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Refreshing inventories for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1218.162694] env[61974]: DEBUG nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating ProviderTree inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1218.163099] env[61974]: DEBUG nova.compute.provider_tree [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 177, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1218.175344] env[61974]: DEBUG nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade 
tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Refreshing aggregate associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, aggregates: None {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1218.194483] env[61974]: DEBUG nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Refreshing trait associations for resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO {{(pid=61974) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1218.258528] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389a378e-0a2b-4875-b8d5-d1a89ab35dd7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.265784] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5f452e-2fb3-44ac-a5c6-cec34575c3d1 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.295765] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302314c4-2fd9-43eb-9d1a-dbffd21168a9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.302796] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79681485-1527-4b10-b4bb-b31b65fdcd91 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.315469] env[61974]: DEBUG nova.compute.provider_tree [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1218.622207] env[61974]: DEBUG oslo_vmware.api [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52dfdd64-aef8-dbb7-50cd-4c3f47208409, 'name': SearchDatastore_Task, 'duration_secs': 0.009445} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.622588] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.844551] env[61974]: DEBUG nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updated inventory for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1218.844825] env[61974]: DEBUG nova.compute.provider_tree [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating resource provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a generation from 135 to 136 during operation: update_inventory {{(pid=61974) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1218.845045] env[61974]: DEBUG nova.compute.provider_tree [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Updating inventory in ProviderTree for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1219.350112] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.390s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.352516] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.730s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.374628] env[61974]: INFO nova.scheduler.client.report [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 
tempest-AttachVolumeNegativeTest-900798343-project-member] Deleted allocations for instance e9cbf858-fb9a-4445-b4b2-3aaf697e83ae [ 1219.884230] env[61974]: DEBUG oslo_concurrency.lockutils [None req-99d16016-36cb-4a56-afcc-f6eb8286bade tempest-AttachVolumeNegativeTest-900798343 tempest-AttachVolumeNegativeTest-900798343-project-member] Lock "e9cbf858-fb9a-4445-b4b2-3aaf697e83ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.941s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.916013] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d083cd4-f1ab-4ef0-ad0b-b882229869e2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.924158] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2161ef7-6a4f-4fef-8182-675b327b2022 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.955969] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d7fffa-367e-4fe9-a545-f24f41dfcbba {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.963635] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37d49a4-7212-418e-9ce2-e3ac3ae9cef6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.978138] env[61974]: DEBUG nova.compute.provider_tree [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.481163] env[61974]: DEBUG nova.scheduler.client.report [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1221.491375] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.139s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.046817] env[61974]: INFO nova.scheduler.client.report [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocation for migration 662d2b2b-5515-402f-a0c3-1adc5eef3a98 
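[editor's note] The report-client entries above are a textbook optimistic-concurrency round trip against Placement: the first PUT of new inventory is rejected with 409 placement.concurrent_update because the cached resource-provider generation was stale, the client re-reads inventories, aggregates and traits, and the retried write succeeds, bumping the provider generation from 135 to 136. Below is a minimal sketch of that retry loop written directly against the Placement REST endpoints with requests; the auth token and microversion handling are simplified assumptions and this is not Nova's actual report client (nova.scheduler.client.report), which also refreshes associations and uses keystoneauth sessions.

```python
import requests


def set_inventory(placement_url, token, rp_uuid, inventories, retries=3):
    """Write inventory using Placement's generation-based optimistic locking."""
    headers = {
        'X-Auth-Token': token,
        'OpenStack-API-Version': 'placement 1.26',  # assumed microversion
    }
    url = f'{placement_url}/resource_providers/{rp_uuid}/inventories'
    for _ in range(retries):
        # Read the provider's current generation before writing.
        current = requests.get(url, headers=headers)
        current.raise_for_status()
        generation = current.json()['resource_provider_generation']

        body = {
            'resource_provider_generation': generation,
            'inventories': inventories,
        }
        resp = requests.put(url, json=body, headers=headers)
        if resp.status_code == 200:
            return resp.json()['resource_provider_generation']
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 placement.concurrent_update: another writer bumped the
        # generation (e.g. an allocation delete); re-read and retry.
    raise RuntimeError('gave up after %d generation conflicts' % retries)
```

This is consistent with what the log shows: a concurrent update (the allocation changes for the destroyed instance) had already advanced the generation, so the first PUT carried a stale value and only the refreshed retry landed.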
[ 1222.552465] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e6994bca-4488-48b5-81d0-80544ad25958 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.297s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.987063] env[61974]: INFO nova.compute.manager [None req-cd853256-6719-49dc-8ecb-802e717ad7fd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Get console output [ 1222.987416] env[61974]: WARNING nova.virt.vmwareapi.driver [None req-cd853256-6719-49dc-8ecb-802e717ad7fd tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] The console log is missing. Check your VSPC configuration [ 1249.908345] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.908744] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.908933] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.909203] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.909425] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.912569] env[61974]: INFO nova.compute.manager [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Terminating instance [ 1249.915596] env[61974]: DEBUG nova.compute.manager [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1249.915858] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.916152] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52a26954-c79a-409a-b0a3-de29e3f9c98e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.923880] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1249.923880] env[61974]: value = "task-1379638" [ 1249.923880] env[61974]: _type = "Task" [ 1249.923880] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.932264] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379638, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.433875] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379638, 'name': PowerOffVM_Task, 'duration_secs': 0.202686} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.434158] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1250.434371] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Volume detach. 
Driver type: vmdk {{(pid=61974) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1250.434573] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293050', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'name': 'volume-e6a79d42-7c85-4a6f-a881-fb861c033343', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ed436bec-b992-45ae-8d17-445a594b1e68', 'attached_at': '2024-10-29T21:04:47.000000', 'detached_at': '', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'serial': 'e6a79d42-7c85-4a6f-a881-fb861c033343'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1250.435330] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ab956b-71cc-4173-b8b5-b222d9964ee6 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.453103] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e1a832-944e-45c2-95f0-6d3624a6b6f0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.458804] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f53770a-cf2a-4232-8521-e717b1ce0b80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.475256] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8380457b-24e1-4f86-bfc5-71dcc91bf93c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.488618] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] The volume has not been displaced from its original location: [datastore1] volume-e6a79d42-7c85-4a6f-a881-fb861c033343/volume-e6a79d42-7c85-4a6f-a881-fb861c033343.vmdk. No consolidation needed. 
{{(pid=61974) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1250.493763] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1250.494014] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79508329-f6f6-421f-99ae-fe2112a43756 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.510757] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1250.510757] env[61974]: value = "task-1379639" [ 1250.510757] env[61974]: _type = "Task" [ 1250.510757] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.517454] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379639, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.019720] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379639, 'name': ReconfigVM_Task, 'duration_secs': 0.155896} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.020089] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=61974) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1251.024572] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-100ebd11-f1bd-4439-9a11-77fb44525ad5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.038388] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1251.038388] env[61974]: value = "task-1379640" [ 1251.038388] env[61974]: _type = "Task" [ 1251.038388] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.046205] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379640, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.549804] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.049662] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.551600] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379640, 'name': ReconfigVM_Task, 'duration_secs': 1.099864} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.551889] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-293050', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'name': 'volume-e6a79d42-7c85-4a6f-a881-fb861c033343', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'ed436bec-b992-45ae-8d17-445a594b1e68', 'attached_at': '2024-10-29T21:04:47.000000', 'detached_at': '', 'volume_id': 'e6a79d42-7c85-4a6f-a881-fb861c033343', 'serial': 'e6a79d42-7c85-4a6f-a881-fb861c033343'} {{(pid=61974) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1252.552180] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.552923] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea92058-cd0a-4db2-9e6f-14b0e39aa076 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.559130] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.559383] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bfd0749-64ea-4149-af84-e6f70f062217 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.623257] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 
tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.623528] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.623666] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore1] ed436bec-b992-45ae-8d17-445a594b1e68 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.623928] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-217ee5f9-d22c-4e7c-9f9f-f3440000c390 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.630091] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1252.630091] env[61974]: value = "task-1379642" [ 1252.630091] env[61974]: _type = "Task" [ 1252.630091] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.639345] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.140317] env[61974]: DEBUG oslo_vmware.api [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078479} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.140682] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.140729] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.140873] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.141069] env[61974]: INFO nova.compute.manager [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Took 3.23 seconds to destroy the instance on the hypervisor. [ 1253.141319] env[61974]: DEBUG oslo.service.loopingcall [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1253.141516] env[61974]: DEBUG nova.compute.manager [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1253.141612] env[61974]: DEBUG nova.network.neutron [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.564323] env[61974]: DEBUG nova.compute.manager [req-84a81499-38c4-427d-91f7-a128044727cc req-e16dd0d5-ff4f-4b8d-a474-c1bc13430c3a service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Received event network-vif-deleted-6256620b-c9e6-4953-98c3-414683e8df1a {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1253.564531] env[61974]: INFO nova.compute.manager [req-84a81499-38c4-427d-91f7-a128044727cc req-e16dd0d5-ff4f-4b8d-a474-c1bc13430c3a service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Neutron deleted interface 6256620b-c9e6-4953-98c3-414683e8df1a; detaching it from the instance and deleting it from the info cache [ 1253.564795] env[61974]: DEBUG nova.network.neutron [req-84a81499-38c4-427d-91f7-a128044727cc req-e16dd0d5-ff4f-4b8d-a474-c1bc13430c3a service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.041470] env[61974]: DEBUG nova.network.neutron [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.067221] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17760ba4-f078-4d0e-b20e-19c49a43bbff {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.078419] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce2e4cd-1b69-4876-8fa3-4b428904e376 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.101598] env[61974]: DEBUG nova.compute.manager [req-84a81499-38c4-427d-91f7-a128044727cc req-e16dd0d5-ff4f-4b8d-a474-c1bc13430c3a service nova] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Detach interface failed, port_id=6256620b-c9e6-4953-98c3-414683e8df1a, reason: Instance ed436bec-b992-45ae-8d17-445a594b1e68 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1254.544832] env[61974]: INFO nova.compute.manager [-] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Took 1.40 seconds to deallocate network for instance. [ 1255.089843] env[61974]: INFO nova.compute.manager [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Took 0.54 seconds to detach 1 volumes for instance. 
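[editor's note] The detach sequence above (power off, _detach_volume_vmdk, a ReconfigVM_Task that detaches disk 2000, then unregister and datastore cleanup) reduces to a VM reconfigure whose device-change list removes the volume's VirtualDisk without touching its backing file. The sketch below shows that reconfigure using pyVmomi rather than the suds-based oslo.vmware client the driver actually uses; the label-based disk lookup is an illustrative assumption.

```python
from pyVmomi import vim


def detach_disk(vm, disk_label):
    """Build and submit a ReconfigVM_Task that removes one virtual disk.

    Omitting ``fileOperation`` on the device spec detaches the disk but
    leaves the backing VMDK in place, matching the volume-preserving
    detach ("no consolidation needed") seen in the log.
    """
    disk = next(dev for dev in vm.config.hardware.device
                if isinstance(dev, vim.vm.device.VirtualDisk)
                and dev.deviceInfo.label == disk_label)

    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
    change.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[change])
    return vm.ReconfigVM_Task(spec=spec)  # poll this task to completion
```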
[ 1255.092018] env[61974]: DEBUG nova.compute.manager [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: ed436bec-b992-45ae-8d17-445a594b1e68] Deleting volume: e6a79d42-7c85-4a6f-a881-fb861c033343 {{(pid=61974) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1255.630127] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.630516] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.630613] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.654242] env[61974]: INFO nova.scheduler.client.report [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocations for instance ed436bec-b992-45ae-8d17-445a594b1e68 [ 1256.162034] env[61974]: DEBUG oslo_concurrency.lockutils [None req-8e74f2f0-4c63-4461-ae83-9dc3d8d4e573 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "ed436bec-b992-45ae-8d17-445a594b1e68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.253s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.577855] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.578172] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.578400] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock 
"4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.578589] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.578766] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.580915] env[61974]: INFO nova.compute.manager [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Terminating instance [ 1256.583067] env[61974]: DEBUG nova.compute.manager [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1256.583381] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1256.584706] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486194fa-a2c6-4722-9d3b-6caf499a06d4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.592908] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1256.593151] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc467112-81cb-4b8d-8832-cfeb151bd63d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.599372] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1256.599372] env[61974]: value = "task-1379644" [ 1256.599372] env[61974]: _type = "Task" [ 1256.599372] env[61974]: } to complete. 
{{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.606577] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.109279] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379644, 'name': PowerOffVM_Task, 'duration_secs': 0.183267} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.109666] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1257.109724] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1257.109941] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92e68914-943a-472e-8ae5-610cab57cd44 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.171850] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1257.172088] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1257.172340] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore2] 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1257.172628] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94bcbb0b-0577-4d8a-9313-b240fc104f01 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.178603] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1257.178603] env[61974]: value = "task-1379646" [ 1257.178603] 
env[61974]: _type = "Task" [ 1257.178603] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.186627] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.688310] env[61974]: DEBUG oslo_vmware.api [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133558} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.688675] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1257.688866] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1257.689059] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1257.689246] env[61974]: INFO nova.compute.manager [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1257.689497] env[61974]: DEBUG oslo.service.loopingcall [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.689690] env[61974]: DEBUG nova.compute.manager [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1257.689785] env[61974]: DEBUG nova.network.neutron [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1257.920818] env[61974]: DEBUG nova.compute.manager [req-b438e18b-bbe9-428e-a2af-a92c5f8bf2d6 req-88cf4e6e-61da-460e-9f95-4ff0db1c12dd service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Received event network-vif-deleted-919720aa-453c-436b-83f3-2f0181f8391e {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1257.920903] env[61974]: INFO nova.compute.manager [req-b438e18b-bbe9-428e-a2af-a92c5f8bf2d6 req-88cf4e6e-61da-460e-9f95-4ff0db1c12dd service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Neutron deleted interface 919720aa-453c-436b-83f3-2f0181f8391e; detaching it from the instance and deleting it from the info cache [ 1257.921075] env[61974]: DEBUG nova.network.neutron [req-b438e18b-bbe9-428e-a2af-a92c5f8bf2d6 req-88cf4e6e-61da-460e-9f95-4ff0db1c12dd service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.394321] env[61974]: DEBUG nova.network.neutron [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.423869] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb32bacd-54b2-4f5e-abbf-dbee338fdd3b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.433906] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4fc45e-3d3b-4d40-b816-46f591575670 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.456698] env[61974]: DEBUG nova.compute.manager [req-b438e18b-bbe9-428e-a2af-a92c5f8bf2d6 req-88cf4e6e-61da-460e-9f95-4ff0db1c12dd service nova] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Detach interface failed, port_id=919720aa-453c-436b-83f3-2f0181f8391e, reason: Instance 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1258.897128] env[61974]: INFO nova.compute.manager [-] [instance: 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea] Took 1.21 seconds to deallocate network for instance. 
[ 1259.403927] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.404238] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.404457] env[61974]: DEBUG nova.objects.instance [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'resources' on Instance uuid 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.953245] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff190d7-51f2-4eff-a683-2445afa2d734 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.960634] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4ffd82-b194-4025-b9d7-e407092b297e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.989357] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a46d52b-7b36-4b2e-bb57-4278f7ccc29c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.995895] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0c0634-f27f-4074-ab40-8ce55308a87d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.008313] env[61974]: DEBUG nova.compute.provider_tree [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.511260] env[61974]: DEBUG nova.scheduler.client.report [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1261.016175] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 
tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.034058] env[61974]: INFO nova.scheduler.client.report [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocations for instance 4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea [ 1261.545389] env[61974]: DEBUG oslo_concurrency.lockutils [None req-d90ea798-809e-45fa-9137-a0ce8dcf20b3 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "4ccda96e-8d54-4f3c-b0b6-ed8463e5a1ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.967s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.289869] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.575892] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.214114] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.214114] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.214114] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.214359] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.214359] env[61974]: 
DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.216478] env[61974]: INFO nova.compute.manager [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Terminating instance [ 1265.218249] env[61974]: DEBUG nova.compute.manager [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1265.218446] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1265.219275] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1eabce-d1ce-46e7-b85c-ed16f1278b14 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.226952] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1265.227191] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-414ba180-4fc1-4ebe-a556-50c90170296c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.233462] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1265.233462] env[61974]: value = "task-1379648" [ 1265.233462] env[61974]: _type = "Task" [ 1265.233462] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.240547] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.743546] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379648, 'name': PowerOffVM_Task, 'duration_secs': 0.184068} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.743926] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.744043] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1265.744250] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc3bca9b-8f2f-4f83-8107-7f93ea3b0c0e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.806661] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1265.806883] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Deleting contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1265.807067] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleting the datastore file [datastore1] 59c72be0-46de-4cb8-93d6-0a2c70c90e2e {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1265.807343] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb3e844-b7b0-44ed-9613-9ae664b8fdc5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.814516] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for the task: (returnval){ [ 1265.814516] env[61974]: value = "task-1379650" [ 1265.814516] env[61974]: _type = "Task" [ 1265.814516] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.822053] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.324480] env[61974]: DEBUG oslo_vmware.api [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Task: {'id': task-1379650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146748} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.324774] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1266.324971] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Deleted contents of the VM from datastore datastore1 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1266.325175] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1266.325355] env[61974]: INFO nova.compute.manager [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1266.325597] env[61974]: DEBUG oslo.service.loopingcall [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1266.325795] env[61974]: DEBUG nova.compute.manager [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1266.325889] env[61974]: DEBUG nova.network.neutron [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1266.564190] env[61974]: DEBUG nova.compute.manager [req-c6c46a85-3828-40df-9b5d-c050a3f2258e req-c7bdea72-528d-433b-97c3-77838ba695d4 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Received event network-vif-deleted-2ebf62dc-0f02-4b1b-bd8f-adc0186ae753 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1266.564190] env[61974]: INFO nova.compute.manager [req-c6c46a85-3828-40df-9b5d-c050a3f2258e req-c7bdea72-528d-433b-97c3-77838ba695d4 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Neutron deleted interface 2ebf62dc-0f02-4b1b-bd8f-adc0186ae753; detaching it from the instance and deleting it from the info cache [ 1266.564190] env[61974]: DEBUG nova.network.neutron [req-c6c46a85-3828-40df-9b5d-c050a3f2258e req-c7bdea72-528d-433b-97c3-77838ba695d4 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.044595] env[61974]: DEBUG nova.network.neutron [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.066861] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d12725c-3e61-48b8-8acd-a72ee201eb80 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.076076] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1923a7d-8804-4e5b-8cad-37b1fff2176c {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.098865] env[61974]: DEBUG nova.compute.manager [req-c6c46a85-3828-40df-9b5d-c050a3f2258e req-c7bdea72-528d-433b-97c3-77838ba695d4 service nova] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Detach interface failed, port_id=2ebf62dc-0f02-4b1b-bd8f-adc0186ae753, reason: Instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1267.547589] env[61974]: INFO nova.compute.manager [-] [instance: 59c72be0-46de-4cb8-93d6-0a2c70c90e2e] Took 1.22 seconds to deallocate network for instance. 
[ 1267.575394] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.054387] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.054713] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.054950] env[61974]: DEBUG nova.objects.instance [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lazy-loading 'resources' on Instance uuid 59c72be0-46de-4cb8-93d6-0a2c70c90e2e {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.078957] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.079401] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.579505] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.580830] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Starting heal instance info cache {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1268.590814] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06d922e-45a0-4e3e-9152-ba638559aeed {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.599653] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f62a64-a935-4570-8717-bb924326f0c4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.629066] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aa674b-6ab7-44d6-9998-c0ca46c0bdcd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.635924] env[61974]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497f1b8b-74e6-4e48-8b00-646ea2b37b46 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.648958] env[61974]: DEBUG nova.compute.provider_tree [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.151982] env[61974]: DEBUG nova.scheduler.client.report [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1269.657451] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.603s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.675452] env[61974]: INFO nova.scheduler.client.report [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Deleted allocations for instance 59c72be0-46de-4cb8-93d6-0a2c70c90e2e [ 1270.088962] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Didn't find any instances for network info cache update. {{(pid=61974) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1270.089229] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.089388] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.089521] env[61974]: DEBUG nova.compute.manager [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61974) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1270.182939] env[61974]: DEBUG oslo_concurrency.lockutils [None req-08c433de-d6b8-4cb9-b9e5-fe25d946b578 tempest-ServerActionsTestOtherA-1109275160 tempest-ServerActionsTestOtherA-1109275160-project-member] Lock "59c72be0-46de-4cb8-93d6-0a2c70c90e2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.969s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.580683] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.580064] env[61974]: DEBUG oslo_service.periodic_task [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Running periodic task ComputeManager.update_available_resource {{(pid=61974) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.083755] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.084047] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.084221] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.084406] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61974) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1273.085351] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d4bc1e-959d-421b-b471-e110481fa265 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.094328] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aaa46b-19c3-4ac3-a038-64b0dc30f7f9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.108418] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a173572e-7df3-4ca2-9b09-1c5db2b84a38 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.115207] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7a9878-fffd-45a2-87c0-4a9f2ff8b76a {{(pid=61974) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.142921] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180826MB free_disk=178GB free_vcpus=48 pci_devices=None {{(pid=61974) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1273.143125] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.143286] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.162452] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1274.162698] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61974) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1274.180176] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ed5c02-53c4-4cab-9aac-6ef8861aa85b {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.187937] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bac4def-52ab-41ff-aa15-aeaf10de1feb {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.218022] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f1c1a9-a549-4253-b95c-808643b62c0d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.225016] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5088b9f2-40dc-4549-87e9-ae576a5e8970 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.238160] env[61974]: DEBUG nova.compute.provider_tree [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.741139] env[61974]: DEBUG nova.scheduler.client.report [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.871022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.871022] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.248741] env[61974]: DEBUG nova.compute.resource_tracker [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61974) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1275.249052] env[61974]: DEBUG oslo_concurrency.lockutils [None req-033b58a6-5a24-4e94-aade-28a32e5eacfc None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.106s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.373264] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Starting instance... 
{{(pid=61974) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1275.896444] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.896747] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.898208] env[61974]: INFO nova.compute.claims [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.933556] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e8227-cf59-4a87-9003-2e460e080fa4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.941049] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bef11fa-9538-4fda-8762-4d69723c17dd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.969694] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39d3774-2f79-49b9-a554-6c0ad0060ab5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.976828] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cd698e-bd88-4bd2-ac23-b098322c7232 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.989538] env[61974]: DEBUG nova.compute.provider_tree [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.492877] env[61974]: DEBUG nova.scheduler.client.report [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1277.999500] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.000092] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Start building networks asynchronously for instance. {{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1278.504816] env[61974]: DEBUG nova.compute.utils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Using /dev/sd instead of None {{(pid=61974) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.506580] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Allocating IP information in the background. {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1278.506761] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] allocate_for_instance() {{(pid=61974) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1278.550870] env[61974]: DEBUG nova.policy [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a3cea211f0f404aad97d80d77eab292', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8a30d5d37224ea3a90fb1152ef45133', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61974) authorize /opt/stack/nova/nova/policy.py:201}} [ 1278.781825] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Successfully created port: 68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.009808] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Start building block device mappings for instance. 
{{(pid=61974) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1280.019399] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Start spawning the instance on the hypervisor. {{(pid=61974) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1280.044743] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T20:52:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T20:51:59Z,direct_url=,disk_format='vmdk',id=2c021a64-f3a3-4b0a-8c90-b07440a3f3d8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='3c405bbe91d340189a2b6f1492820f31',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T20:52:00Z,virtual_size=,visibility=), allow threads: False {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1280.045031] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Flavor limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1280.045187] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Image limits 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1280.045373] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Flavor pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1280.045545] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Image pref 0:0:0 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1280.045712] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61974) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1280.045928] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1280.046499] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1280.046499] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Got 1 possible topologies {{(pid=61974) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1280.046499] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1280.046690] env[61974]: DEBUG nova.virt.hardware [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61974) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1280.047592] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c588d0-1800-462f-b9e8-6bbf5634f135 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.055896] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b65460-a2b9-4ffd-aba3-6f5c602e5961 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.165204] env[61974]: DEBUG nova.compute.manager [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Received event network-vif-plugged-68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1280.165438] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] Acquiring lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.165652] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.165826] env[61974]: DEBUG oslo_concurrency.lockutils [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.166043] env[61974]: DEBUG nova.compute.manager [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] No waiting events found dispatching network-vif-plugged-68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.166185] env[61974]: WARNING nova.compute.manager [req-c3475af9-0204-4aab-b45d-3461806be074 req-5c310162-9028-4ca4-9421-2b3a116638d3 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Received unexpected event network-vif-plugged-68513d8e-6de9-4c1d-a3ea-b396d61092d0 for instance with vm_state building and task_state spawning. [ 1280.258251] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Successfully updated port: 68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.761596] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.761792] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquired lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.761849] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Building network info cache for instance {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.297869] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Instance cache missing network info. 
{{(pid=61974) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1281.435534] env[61974]: DEBUG nova.network.neutron [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Updating instance_info_cache with network_info: [{"id": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "address": "fa:16:3e:42:84:54", "network": {"id": "35b2b720-86f3-4461-8937-34444efa6db7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-252307713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a30d5d37224ea3a90fb1152ef45133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68513d8e-6d", "ovs_interfaceid": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.938141] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Releasing lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.938479] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Instance network_info: |[{"id": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "address": "fa:16:3e:42:84:54", "network": {"id": "35b2b720-86f3-4461-8937-34444efa6db7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-252307713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a30d5d37224ea3a90fb1152ef45133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68513d8e-6d", "ovs_interfaceid": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=61974) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1281.938929] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:84:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68513d8e-6de9-4c1d-a3ea-b396d61092d0', 'vif_model': 'vmxnet3'}] {{(pid=61974) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.946420] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Creating folder: Project (a8a30d5d37224ea3a90fb1152ef45133). Parent ref: group-v292912. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.946690] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3f77769-dbc7-42f2-854c-f3569a0b16f4 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.958168] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Created folder: Project (a8a30d5d37224ea3a90fb1152ef45133) in parent group-v292912. [ 1281.958355] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Creating folder: Instances. Parent ref: group-v293053. {{(pid=61974) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.958597] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a2f4938-ff51-4fac-9389-058d8a833d51 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.966542] env[61974]: INFO nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Created folder: Instances in parent group-v293053. [ 1281.966781] env[61974]: DEBUG oslo.service.loopingcall [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.966978] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Creating VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1281.967183] env[61974]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-061e8cd8-1373-49e3-8b67-d296a8d3e1e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.986235] env[61974]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.986235] env[61974]: value = "task-1379653" [ 1281.986235] env[61974]: _type = "Task" [ 1281.986235] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.993241] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379653, 'name': CreateVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.193463] env[61974]: DEBUG nova.compute.manager [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Received event network-changed-68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1282.193642] env[61974]: DEBUG nova.compute.manager [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Refreshing instance network info cache due to event network-changed-68513d8e-6de9-4c1d-a3ea-b396d61092d0. {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1282.193875] env[61974]: DEBUG oslo_concurrency.lockutils [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] Acquiring lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.194035] env[61974]: DEBUG oslo_concurrency.lockutils [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] Acquired lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.194234] env[61974]: DEBUG nova.network.neutron [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Refreshing network info cache for port 68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.496391] env[61974]: DEBUG oslo_vmware.api [-] Task: {'id': task-1379653, 'name': CreateVM_Task, 'duration_secs': 0.312138} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.496705] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Created VM on the ESX host {{(pid=61974) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.497168] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.497342] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.497676] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1282.497911] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bbd5bc0-e661-49d4-885a-34540c5ca4bd {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.502598] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1282.502598] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]529b45b7-4516-1120-856a-ee663b21b234" [ 1282.502598] env[61974]: _type = "Task" [ 1282.502598] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.509576] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529b45b7-4516-1120-856a-ee663b21b234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.872323] env[61974]: DEBUG nova.network.neutron [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Updated VIF entry in instance network info cache for port 68513d8e-6de9-4c1d-a3ea-b396d61092d0. 
{{(pid=61974) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1282.872766] env[61974]: DEBUG nova.network.neutron [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Updating instance_info_cache with network_info: [{"id": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "address": "fa:16:3e:42:84:54", "network": {"id": "35b2b720-86f3-4461-8937-34444efa6db7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-252307713-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a30d5d37224ea3a90fb1152ef45133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68513d8e-6d", "ovs_interfaceid": "68513d8e-6de9-4c1d-a3ea-b396d61092d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.013730] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]529b45b7-4516-1120-856a-ee663b21b234, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.014054] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.014334] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Processing image 2c021a64-f3a3-4b0a-8c90-b07440a3f3d8 {{(pid=61974) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1283.014618] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1283.014811] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.015044] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1283.015337] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-990257c9-976e-4958-9d41-3f023f5ffeb7 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.023388] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61974) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1283.023585] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61974) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1283.024351] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1b200ee-23de-40cf-8559-0779a55edc1e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.029234] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1283.029234] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d862d8-9ea4-f33f-7bf2-e455ccbb4dec" [ 1283.029234] env[61974]: _type = "Task" [ 1283.029234] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.036759] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d862d8-9ea4-f33f-7bf2-e455ccbb4dec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.376134] env[61974]: DEBUG oslo_concurrency.lockutils [req-d6dae655-e542-47f1-aa8e-4b973c05d012 req-a9970542-eb9a-4dc8-9f7b-c9308bed78ba service nova] Releasing lock "refresh_cache-f19b2581-2cfe-40f8-8b18-f827207af8e5" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.539568] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d862d8-9ea4-f33f-7bf2-e455ccbb4dec, 'name': SearchDatastore_Task, 'duration_secs': 0.007803} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.540324] env[61974]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e25a7355-454a-408f-babe-77bfe3a74de2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.545324] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1283.545324] env[61974]: value = "session[525705e4-584c-0143-c92b-c97128d43fa3]52d01c59-bf4c-7212-6e7d-20880bba1b8e" [ 1283.545324] env[61974]: _type = "Task" [ 1283.545324] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.552238] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d01c59-bf4c-7212-6e7d-20880bba1b8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.056059] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': session[525705e4-584c-0143-c92b-c97128d43fa3]52d01c59-bf4c-7212-6e7d-20880bba1b8e, 'name': SearchDatastore_Task, 'duration_secs': 0.00847} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.056336] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk" {{(pid=61974) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.056600] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f19b2581-2cfe-40f8-8b18-f827207af8e5/f19b2581-2cfe-40f8-8b18-f827207af8e5.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1284.056863] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4df2b635-368a-4312-9a93-190c681321d0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.063073] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1284.063073] env[61974]: value = "task-1379654" [ 1284.063073] env[61974]: _type = "Task" [ 1284.063073] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.070384] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.572674] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459527} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.573053] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8/2c021a64-f3a3-4b0a-8c90-b07440a3f3d8.vmdk to [datastore2] f19b2581-2cfe-40f8-8b18-f827207af8e5/f19b2581-2cfe-40f8-8b18-f827207af8e5.vmdk {{(pid=61974) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1284.573162] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Extending root virtual disk to 1048576 {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1284.573352] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc5a4cc1-b505-42b1-b143-1b77299cb8e0 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.580549] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1284.580549] env[61974]: value = "task-1379655" [ 1284.580549] env[61974]: _type = "Task" [ 1284.580549] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.587441] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.090812] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080556} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.090812] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Extended root virtual disk {{(pid=61974) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.091486] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a174dce6-0034-44ed-9fd3-fd4be6fee10f {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.112975] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] f19b2581-2cfe-40f8-8b18-f827207af8e5/f19b2581-2cfe-40f8-8b18-f827207af8e5.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.113252] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63f54188-a57c-49b8-bfe0-a43cc9bfbd5e {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.132026] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1285.132026] env[61974]: value = "task-1379656" [ 1285.132026] env[61974]: _type = "Task" [ 1285.132026] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.139435] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.642755] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379656, 'name': ReconfigVM_Task, 'duration_secs': 0.317014} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.643148] env[61974]: DEBUG nova.virt.vmwareapi.volumeops [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Reconfigured VM instance instance-0000006e to attach disk [datastore2] f19b2581-2cfe-40f8-8b18-f827207af8e5/f19b2581-2cfe-40f8-8b18-f827207af8e5.vmdk or device None with type sparse {{(pid=61974) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1285.643674] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2be69253-eefa-43f1-8361-53d4d5ff2198 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.649798] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1285.649798] env[61974]: value = "task-1379657" [ 1285.649798] env[61974]: _type = "Task" [ 1285.649798] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.657007] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379657, 'name': Rename_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.159745] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379657, 'name': Rename_Task} progress is 14%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.660688] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379657, 'name': Rename_Task, 'duration_secs': 0.874958} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.661075] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Powering on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1286.661206] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76e2dd9b-0d3b-433f-99f8-6044b1c0f4b9 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.667494] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1286.667494] env[61974]: value = "task-1379658" [ 1286.667494] env[61974]: _type = "Task" [ 1286.667494] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.674419] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.177588] env[61974]: DEBUG oslo_vmware.api [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379658, 'name': PowerOnVM_Task, 'duration_secs': 0.452503} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.177827] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Powered on the VM {{(pid=61974) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1287.178013] env[61974]: INFO nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Took 7.16 seconds to spawn the instance on the hypervisor. 
[ 1287.178236] env[61974]: DEBUG nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Checking state {{(pid=61974) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1287.178976] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ce1ded-8c3b-4a5e-90d7-1da44dd13c1d {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.695316] env[61974]: INFO nova.compute.manager [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Took 11.82 seconds to build instance. [ 1287.999940] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.197290] env[61974]: DEBUG oslo_concurrency.lockutils [None req-b92d03fc-af2c-4ff5-bb2e-48cb4b22d1eb tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.326s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.197607] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.198s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.197846] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.198075] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.198260] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 
tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.200466] env[61974]: INFO nova.compute.manager [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Terminating instance [ 1288.202246] env[61974]: DEBUG nova.compute.manager [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Start destroying the instance on the hypervisor. {{(pid=61974) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1288.202467] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Destroying instance {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.203304] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa921422-7ba3-411f-b450-e382e7605398 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.212403] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Powering off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.212663] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2445c793-9c20-44e3-9712-94de70adee97 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.218380] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1288.218380] env[61974]: value = "task-1379659" [ 1288.218380] env[61974]: _type = "Task" [ 1288.218380] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.226221] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379659, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.728256] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379659, 'name': PowerOffVM_Task, 'duration_secs': 0.197242} completed successfully. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.728559] env[61974]: DEBUG nova.virt.vmwareapi.vm_util [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Powered off the VM {{(pid=61974) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.728683] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Unregistering the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.728940] env[61974]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a352ba44-e122-43fa-b35a-000ce280bbda {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.812927] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Unregistered the VM {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1288.813159] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Deleting contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1288.813346] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Deleting the datastore file [datastore2] f19b2581-2cfe-40f8-8b18-f827207af8e5 {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1288.813642] env[61974]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d90f4eb5-ddf6-4a05-8e6d-ca2caa7100e3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.820206] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for the task: (returnval){ [ 1288.820206] env[61974]: value = "task-1379661" [ 1288.820206] env[61974]: _type = "Task" [ 1288.820206] env[61974]: } to complete. {{(pid=61974) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.827678] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379661, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.329891] env[61974]: DEBUG oslo_vmware.api [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Task: {'id': task-1379661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185979} completed successfully. {{(pid=61974) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.330115] env[61974]: DEBUG nova.virt.vmwareapi.ds_util [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Deleted the datastore file {{(pid=61974) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.330421] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Deleted contents of the VM from datastore datastore2 {{(pid=61974) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.330678] env[61974]: DEBUG nova.virt.vmwareapi.vmops [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Instance destroyed {{(pid=61974) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.330883] env[61974]: INFO nova.compute.manager [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1289.331155] env[61974]: DEBUG oslo.service.loopingcall [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61974) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.331366] env[61974]: DEBUG nova.compute.manager [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Deallocating network for instance {{(pid=61974) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1289.331461] env[61974]: DEBUG nova.network.neutron [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] deallocate_for_instance() {{(pid=61974) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.562226] env[61974]: DEBUG nova.compute.manager [req-628d85d5-7954-4167-b21a-1e51ffaf9caf req-cea01031-9973-4e25-8260-fe85a3d22918 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Received event network-vif-deleted-68513d8e-6de9-4c1d-a3ea-b396d61092d0 {{(pid=61974) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1289.562592] env[61974]: INFO nova.compute.manager [req-628d85d5-7954-4167-b21a-1e51ffaf9caf req-cea01031-9973-4e25-8260-fe85a3d22918 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Neutron deleted interface 68513d8e-6de9-4c1d-a3ea-b396d61092d0; detaching it from the instance and deleting it from the info cache [ 1289.562881] env[61974]: DEBUG nova.network.neutron [req-628d85d5-7954-4167-b21a-1e51ffaf9caf req-cea01031-9973-4e25-8260-fe85a3d22918 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.041454] env[61974]: DEBUG nova.network.neutron [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Updating instance_info_cache with network_info: [] {{(pid=61974) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.065759] env[61974]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8705d1fb-5fec-4c4a-bfa5-6da7b366b7ee {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.075331] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e68e811-9b1e-4b6f-9e38-bfe0a1ac5e64 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.098050] env[61974]: DEBUG nova.compute.manager [req-628d85d5-7954-4167-b21a-1e51ffaf9caf req-cea01031-9973-4e25-8260-fe85a3d22918 service nova] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Detach interface failed, port_id=68513d8e-6de9-4c1d-a3ea-b396d61092d0, reason: Instance f19b2581-2cfe-40f8-8b18-f827207af8e5 could not be found. {{(pid=61974) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1290.544843] env[61974]: INFO nova.compute.manager [-] [instance: f19b2581-2cfe-40f8-8b18-f827207af8e5] Took 1.21 seconds to deallocate network for instance. 
[ 1291.052193] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.052564] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.052789] env[61974]: DEBUG nova.objects.instance [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lazy-loading 'resources' on Instance uuid f19b2581-2cfe-40f8-8b18-f827207af8e5 {{(pid=61974) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.586375] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051198fd-d4e5-4e13-9cfc-cb7b20d6d3e5 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.595257] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e7bb8e-5450-45d0-95f0-214a1168df6a {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.624118] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce313190-b4f9-47f1-88d5-2bf7662512d2 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.630914] env[61974]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84243b31-b08b-4f73-9f1f-ecc2151dedb3 {{(pid=61974) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.643240] env[61974]: DEBUG nova.compute.provider_tree [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Inventory has not changed in ProviderTree for provider: 81f72dd1-35ef-4b87-b120-a6ea5ab8608a {{(pid=61974) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.146084] env[61974]: DEBUG nova.scheduler.client.report [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Inventory has not changed for provider 81f72dd1-35ef-4b87-b120-a6ea5ab8608a based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61974) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1293.653873] env[61974]: DEBUG 
oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.601s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.681611] env[61974]: INFO nova.scheduler.client.report [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Deleted allocations for instance f19b2581-2cfe-40f8-8b18-f827207af8e5 [ 1294.189689] env[61974]: DEBUG oslo_concurrency.lockutils [None req-e38f0b09-09a4-47d3-81ac-038f6b30cbdf tempest-ServerAddressesNegativeTestJSON-564794553 tempest-ServerAddressesNegativeTestJSON-564794553-project-member] Lock "f19b2581-2cfe-40f8-8b18-f827207af8e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.992s {{(pid=61974) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}